diff --git a/.buildkite/hooks/pre-command b/.buildkite/hooks/pre-command index 40fb970a76196..b6b730fc3de8b 100644 --- a/.buildkite/hooks/pre-command +++ b/.buildkite/hooks/pre-command @@ -92,7 +92,7 @@ fi # Initialize the build scan and gobld annotations with empty/open
tags # This ensures that they are collapsible when they get appended to -if [[ "${BUILDKITE_LABEL:-}" == *"Pipeline upload"* ]]; then +if [[ "${BUILDKITE_LABEL:-}" == *"Pipeline upload"* || "${BUILDKITE_LABEL:-}" == *"Upload Pipeline"* ]]; then cat << EOF | buildkite-agent annotate --context "gradle-build-scans" --style "info"
diff --git a/.buildkite/pipelines/periodic.template.yml b/.buildkite/pipelines/periodic.template.yml index ec3ae76ffcdfb..08ba9529eb882 100644 --- a/.buildkite/pipelines/periodic.template.yml +++ b/.buildkite/pipelines/periodic.template.yml @@ -89,7 +89,7 @@ steps: GRADLE_TASK: "{{matrix.GRADLE_TASK}}" - label: release-tests command: .buildkite/scripts/release-tests.sh - timeout_in_minutes: 300 + timeout_in_minutes: 360 agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 9afb088a5a50e..6f417e8f8ca84 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -1150,7 +1150,7 @@ steps: GRADLE_TASK: "{{matrix.GRADLE_TASK}}" - label: release-tests command: .buildkite/scripts/release-tests.sh - timeout_in_minutes: 300 + timeout_in_minutes: 360 agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+bwc-trigger.yml deleted file mode 100644 index 291ed41a5facf..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+bwc - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/8 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml index f4cadb7bad693..2eeb08c6cff65 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+bwc.yml @@ -1,9 +1,8 @@ --- -jjbb-template: matrix-gradle-unix.yml +jjbb-template: matrix-gradle-unix-disabled.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+periodic+bwc - job-display-name: "elastic / elasticsearch # %BRANCH% - backwards compatibility matrix" - - job-description: "Testing of the Elasticsearch %BRANCH% branch backwards compatibility matrix.\n" - matrix-yaml-file: ".ci/bwcVersions" - matrix-variable: BWC_VERSION - gradle-args: "-Dbwc.checkout.align=true v$BWC_VERSION#bwcTest" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests-trigger.yml deleted file mode 100644 index 1816e7143355f..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+concurrent-search-tests - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests.yml index 99758cb9d088c..ad48635654459 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+concurrent-search-tests.yml @@ -2,7 +2,8 @@ - job: name: elastic+elasticsearch+%BRANCH%+periodic+concurrent-search-tests display-name: "elastic / elasticsearch # %BRANCH% - concurrent search tests" - description: "Testing concurrent search enabled for the Elasticsearch %BRANCH% branch.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true node: "general-purpose && docker" builders: - inject: diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+ear-trigger.yml 
b/.ci/jobs.t/elastic+elasticsearch+periodic+ear-trigger.yml deleted file mode 100644 index a50a1f96358ad..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+ear-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+ear - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+ear.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+ear.yml index b1b5a39f92a5a..67462d3a2a809 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+ear.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+ear.yml @@ -2,7 +2,8 @@ - job: name: elastic+elasticsearch+%BRANCH%+periodic+ear display-name: "elastic / elasticsearch # %BRANCH% - encryption at rest" - description: "The Elasticsearch %BRANCH% branch encryption at rest compatibility tests.\n\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true node: packaging-large builders: - inject: diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness-trigger.yml deleted file mode 100644 index 986c52a137de3..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+eql-correctness - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/8 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness.yml index 2652732974661..a23bae19134fc 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+eql-correctness.yml @@ -3,7 +3,8 @@ name: elastic+elasticsearch+%BRANCH%+periodic+eql-correctness workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+periodic+eql-correctness display-name: "elastic / elasticsearch # %BRANCH% - eql correctness tests" - description: "Testing of Elasticsearch %BRANCH% EQL.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true builders: - inject: properties-file: '.ci/java-versions.properties' diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins-trigger.yml deleted file mode 100644 index 909d175f11882..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+example-plugins - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins.yml index 2423a85b2a6bd..ee496690e82ce 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+example-plugins.yml @@ -3,7 +3,8 @@ name: elastic+elasticsearch+%BRANCH%+periodic+example-plugins workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+periodic+example-plugins display-name: "elastic / elasticsearch # %BRANCH% - example plugin tests" - description: "Testing of Elasticsearch %BRANCH% example plugins.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true builders: - inject: 
properties-file: '.ci/java-versions.properties' diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix-trigger.yml deleted file mode 100644 index fb2a23855cc9f..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+java-fips-matrix - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix.yml index cc6f2d38d5918..b1fd03c08208c 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+java-fips-matrix.yml @@ -2,7 +2,8 @@ - job: name: "elastic+elasticsearch+%BRANCH%+periodic+java-fips-matrix" display-name: "elastic / elasticsearch # %BRANCH% - java fips compatibility matrix" - description: "Testing of the Elasticsearch %BRANCH% branch java FIPS compatibility matrix.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true project-type: matrix child-workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+periodic+java-fips-matrix node: master diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix-trigger.yml deleted file mode 100644 index 8de3326dd819d..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+java-matrix - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix.yml index 07f4a8c5b6760..963e72b81f305 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+java-matrix.yml @@ -2,7 +2,8 @@ - job: name: "elastic+elasticsearch+%BRANCH%+periodic+java-matrix" display-name: "elastic / elasticsearch # %BRANCH% - java compatibility matrix" - description: "Testing of the Elasticsearch %BRANCH% branch java compatibility matrix.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true project-type: matrix child-workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+periodic+java-matrix node: master diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests-trigger.yml deleted file mode 100644 index d8c8b557e4514..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+packaging-tests - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/8 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests.yml index a68641f50a174..e6f6cb5c3771b 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+packaging-tests.yml @@ -2,7 +2,8 @@ - job: name: elastic+elasticsearch+%BRANCH%+periodic+packaging-tests display-name: 
"elastic / elasticsearch # %BRANCH% - packaging tests" - description: "Testing of the Elasticsearch %BRANCH% branch packaging tests.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true project-type: multijob node: master vault: [] diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests-trigger.yml deleted file mode 100644 index c624c929b3dd6..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+release-tests - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml index 5fc2d5cd1ca5d..abaf4242e1648 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+release-tests.yml @@ -4,7 +4,8 @@ # Don't use ramdisk since this build generates lots of large artifacts and results in oomkiller issues # workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+periodic+release-tests display-name: "elastic / elasticsearch # %BRANCH% - release tests" - description: "Release version tests for the Elasticsearch %BRANCH% branch.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true node: "general-purpose && docker" builders: - inject: diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml index c67baa07da1ee..02240bf1bb339 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-node-tests.yml @@ -2,7 +2,8 @@ - job: name: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests display-name: "elastic / elasticsearch # %BRANCH% - single processor node tests" - description: "Testing with node.processors set to '1' for the Elasticsearch %BRANCH% branch.\n" + description: "This job has been migrated to Buildkite.\n" + disabled: true node: "general-purpose && docker" builders: - inject: diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml deleted file mode 100644 index 40ad9e9dd5446..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+single-processor-tests-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+periodic+single-processor-node-tests - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H/12 * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring-trigger.yml b/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring-trigger.yml deleted file mode 100644 index fa0f06c3315af..0000000000000 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring-trigger.yml +++ /dev/null @@ -1,6 +0,0 @@ ---- -jjbb-template: periodic-trigger-lgc.yml -vars: - - periodic-job: elastic+elasticsearch+%BRANCH%+snyk-dependency-monitoring - - lgc-job: elastic+elasticsearch+%BRANCH%+intake - - cron: "H H * * *" diff --git a/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring.yml 
b/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring.yml index 1a76003f3d13c..6190937cc6490 100644 --- a/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring.yml +++ b/.ci/jobs.t/elastic+elasticsearch+periodic+snyk-dependency-monitoring.yml @@ -3,7 +3,8 @@ name: elastic+elasticsearch+%BRANCH%+snyk-dependency-monitoring workspace: /dev/shm/elastic+elasticsearch+%BRANCH%+snyk-dependency-monitoring display-name: "elastic / elasticsearch # %BRANCH% - snyk dependency monitoring" - description: "Publishing of the Elasticsearch %BRANCH% dependencies graph to snyk dependency monitoring" + description: "This job has been migrated to Buildkite.\n" + disabled: true builders: - inject: properties-file: '.ci/java-versions.properties' diff --git a/.ci/templates.t/matrix-gradle-unix-disabled.yml b/.ci/templates.t/matrix-gradle-unix-disabled.yml new file mode 100644 index 0000000000000..1eafe77a5ec78 --- /dev/null +++ b/.ci/templates.t/matrix-gradle-unix-disabled.yml @@ -0,0 +1,32 @@ +--- +- job: + name: "{job-name}" + display-name: "{job-display-name}" + description: "This job has been migrated to Buildkite.\n" + disabled: true + project-type: matrix + child-workspace: /dev/shm/{job-name} + node: master + scm: + - git: + wipe-workspace: false + axes: + - axis: + type: slave + name: nodes + values: + - "general-purpose" + - axis: + type: yaml + filename: "{matrix-yaml-file}" + name: "{matrix-variable}" + builders: + - inject: + properties-file: ".ci/java-versions.properties" + properties-content: | + JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA + JAVA11_HOME=$HOME/.java/java11 + JAVA16_HOME=$HOME/.java/openjdk16 + - shell: | + #!/usr/local/bin/runbld --redirect-stderr + $WORKSPACE/.ci/scripts/run-gradle.sh {gradle-args} diff --git a/TESTING.asciidoc b/TESTING.asciidoc index d33121a15dcf7..0393cf92776fa 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -108,6 +108,8 @@ password: `elastic-password`. - In order to set an Elasticsearch setting, provide a setting with the following prefix: `-Dtests.es.` - In order to pass a JVM setting, e.g. to disable assertions: `-Dtests.jvm.argline="-da"` - In order to use HTTPS: ./gradlew run --https +- In order to start a mock logging APM server on port 9999 and configure the ES cluster to connect to it, +use `./gradlew run --with-apm-server` ==== Customizing the test cluster for ./gradlew run @@ -487,136 +489,16 @@ If in doubt about which command to use, simply run :check == Testing packaging -The packaging tests use Vagrant virtual machines or cloud instances to verify +The packaging tests are run on different cloud build VM instances to verify that installing and running Elasticsearch distributions works correctly on supported operating systems. These tests should really only be run on ephemeral systems because they're destructive; that is, these tests install and remove packages and freely modify system settings, so you will probably regret it if you execute them on your development machine. -When you run a packaging test, Gradle will set up the target VM and mount your -repository directory in the VM. Once this is done, a Gradle task will issue a -Vagrant command to run a *nested* Gradle task on the VM. This nested Gradle -runs the actual "destructive" test classes. - -. Install Virtual Box and Vagrant. -+ -.
(Optional) Install https://github.com/fgrehm/vagrant-cachier[vagrant-cachier] to squeeze -a bit more performance out of the process: -+ --------------------------------------- -vagrant plugin install vagrant-cachier --------------------------------------- -+ -. You can run all of the OS packaging tests with `./gradlew packagingTest`. -This task includes our legacy `bats` tests. To run only the OS tests that are -written in Java, run `.gradlew distroTest`, will cause Gradle to build the tar, -zip, and deb packages and all the plugins. It will then run the tests on every -available system. This will take a very long time. -+ -Fortunately, the various systems under test have their own Gradle tasks under -`qa/os`. To find the systems tested, do a listing of the `qa/os` directory. -To find out what packaging combinations can be tested on a system, run -the `tasks` task. For example: -+ ----------------------------------- -./gradlew :qa:os:ubuntu-1804:tasks ----------------------------------- -+ -If you want a quick test of the tarball and RPM packagings for Centos 7, you -would run: -+ -------------------------------------------------------------------------------------------------- -./gradlew :qa:os:centos-7:distroTest.default-rpm :qa:os:centos-7:distroTest.default-linux-archive -------------------------------------------------------------------------------------------------- - -Note that if you interrupt Gradle in the middle of running these tasks, any boxes started -will remain running and you'll have to stop them manually with `./gradlew --stop` or -`vagrant halt`. - -All the regular vagrant commands should just work so you can get a shell in a -VM running trusty by running -`vagrant up ubuntu-1804 --provider virtualbox && vagrant ssh ubuntu-1804`. - -=== Testing packaging on Windows - -The packaging tests also support Windows Server 2012R2 and Windows Server 2016. -Unfortunately we're not able to provide boxes for them in open source use -because of licensing issues. Any Virtualbox image that has WinRM and Powershell -enabled for remote users should work. - -Specify the image IDs of the Windows boxes to gradle with the following project -properties. They can be set in `~/.gradle/gradle.properties` like - ------------------------------------- -vagrant.windows-2012r2.id=my-image-id -vagrant.windows-2016.id=another-image-id ------------------------------------- - -or passed on the command line like `-Pvagrant.windows-2012r2.id=my-image-id` -`-Pvagrant.windows-2016=another-image-id` - -These properties are required for Windows support in all gradle tasks that -handle packaging tests. Either or both may be specified. - -If you're running vagrant commands outside of gradle, specify the Windows boxes -with the environment variables - -* `VAGRANT_WINDOWS_2012R2_BOX` -* `VAGRANT_WINDOWS_2016_BOX` - -=== Testing VMs are disposable - -It's important to think of VMs like cattle. If they become lame you just shoot -them and let vagrant reprovision them. 
Say you've hosed your precise VM: +=== Reproducing packaging tests ----------------------------------------------------- vagrant ssh ubuntu-1604 -c 'sudo rm -rf /bin'; echo oops ----------------------------------------------------- - -All you've got to do to get another one is - ---------------------------------------------- -vagrant destroy -f ubuntu-1604 && vagrant up ubuntu-1604 --provider virtualbox ---------------------------------------------- - -The whole process takes a minute and a half on a modern laptop, two and a half -without vagrant-cachier. - -It's possible that some downloads will fail and it'll be impossible to restart -them. This is a bug in vagrant. See the instructions here for how to work -around it: -https://github.com/mitchellh/vagrant/issues/4479 - -Some vagrant commands will work on all VMs at once: - ------------------- -vagrant halt -vagrant destroy -f ------------------- - -`vagrant up` would normally start all the VMs but we've prevented that because -that'd consume a ton of ram. - -=== Iterating on packaging tests - -Because our packaging tests are capable of testing many combinations of OS -(e.g., Windows, Linux, etc.), package type (e.g., zip file, RPM, etc.), -Elasticsearch distribution type (e.g., default or OSS), and so forth, it's -faster to develop against smaller subsets of the tests. For example, to run -tests for the default archive distribution on Fedora 28: - ------------------------------------------------------------ -./gradlew :qa:os:fedora-28:distroTest.default-linux-archive ------------------------------------------------------------ - -These test tasks can use the `--tests`, `--info`, and `--debug` parameters just like -non-OS tests can. For example: - ------------------------------------------------------------ -./gradlew :qa:os:fedora-28:distroTest.default-linux-archive \ - --tests "com.elasticsearch.packaging.test.ArchiveTests" ------------------------------------------------------------ +To reproduce or debug packaging test failures, we recommend using our provided https://github.com/elastic/elasticsearch-infra/blob/master/buildkite-tools/README.md[*buildkite tools*] == Testing backwards compatibility diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index 9c527923fae02..9fa876a00c35c 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -19,33 +19,25 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.BlockReaderFactories; import
org.elasticsearch.compute.lucene.LuceneSourceOperator; -import org.elasticsearch.compute.lucene.ValueSourceInfo; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.topn.TopNOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.IndexNumericFieldData; -import org.elasticsearch.index.fielddata.plain.SortedDoublesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; -import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.script.field.KeywordDocValuesField; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; -import org.elasticsearch.search.aggregations.support.FieldContext; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Fork; @@ -101,54 +93,18 @@ public class ValuesSourceReaderBenchmark { } } - private static ValueSourceInfo info(IndexReader reader, String name) { + private static BlockLoader blockLoader(String name) { return switch (name) { - case "long" -> numericInfo(reader, name, IndexNumericFieldData.NumericType.LONG, ElementType.LONG); - case "int" -> numericInfo(reader, name, IndexNumericFieldData.NumericType.INT, ElementType.INT); - case "double" -> { - SortedDoublesIndexFieldData fd = new SortedDoublesIndexFieldData( - name, - IndexNumericFieldData.NumericType.DOUBLE, - CoreValuesSourceType.NUMERIC, - null - ); - FieldContext context = new FieldContext(name, fd, null); - yield new ValueSourceInfo( - CoreValuesSourceType.NUMERIC, - CoreValuesSourceType.NUMERIC.getField(context, null), - ElementType.DOUBLE, - reader - ); - } - case "keyword" -> { - SortedSetOrdinalsIndexFieldData fd = new SortedSetOrdinalsIndexFieldData( - new IndexFieldDataCache.None(), - "keyword", - CoreValuesSourceType.KEYWORD, - new NoneCircuitBreakerService(), - (dv, n) -> new KeywordDocValuesField(FieldData.toString(dv), n) - ); - FieldContext context = new FieldContext(name, fd, null); - yield new ValueSourceInfo( - CoreValuesSourceType.KEYWORD, - CoreValuesSourceType.KEYWORD.getField(context, null), - ElementType.BYTES_REF, - reader - ); - } + case "long" -> numericBlockLoader(name, NumberFieldMapper.NumberType.LONG); + case "int" -> numericBlockLoader(name, NumberFieldMapper.NumberType.INTEGER); + case "double" -> numericBlockLoader(name, NumberFieldMapper.NumberType.DOUBLE); + case "keyword" -> new KeywordFieldMapper.KeywordFieldType(name).blockLoader(null); default -> throw new IllegalArgumentException("can't read [" + name + "]"); }; } - private static ValueSourceInfo numericInfo( - IndexReader reader, - String name, - IndexNumericFieldData.NumericType numericType, - ElementType elementType - ) { - SortedNumericIndexFieldData fd = new SortedNumericIndexFieldData(name, numericType, CoreValuesSourceType.NUMERIC, null); - FieldContext context = new FieldContext(name, fd, null); - return new ValueSourceInfo(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.NUMERIC.getField(context, null), elementType, reader); + private static BlockLoader numericBlockLoader(String name, 
NumberFieldMapper.NumberType numberType) { + return new NumberFieldMapper.NumberFieldType(name, numberType).blockLoader(null); } /** @@ -176,7 +132,12 @@ private static ValueSourceInfo numericInfo( @Benchmark @OperationsPerInvocation(INDEX_SIZE) public void benchmark() { - ValuesSourceReaderOperator op = new ValuesSourceReaderOperator(List.of(info(reader, name)), 0, name); + ValuesSourceReaderOperator op = new ValuesSourceReaderOperator( + BlockFactory.getNonBreakingInstance(), + List.of(BlockReaderFactories.loaderToFactory(reader, blockLoader(name))), + 0, + name + ); long sum = 0; for (Page page : pages) { op.addInput(page); @@ -203,13 +164,24 @@ public void benchmark() { BytesRef scratch = new BytesRef(); BytesRefVector values = op.getOutput().getBlock(1).asVector(); for (int p = 0; p < values.getPositionCount(); p++) { - sum += Integer.parseInt(values.getBytesRef(p, scratch).utf8ToString()); + BytesRef r = values.getBytesRef(p, scratch); + r.offset++; + r.length--; + sum += Integer.parseInt(r.utf8ToString()); } } } } - long expected = INDEX_SIZE; - expected = expected * (expected - 1) / 2; + long expected; + if (name.equals("keyword")) { + expected = 0; + for (int i = 0; i < INDEX_SIZE; i++) { + expected += i % 1000; + } + } else { + expected = INDEX_SIZE; + expected = expected * (expected - 1) / 2; + } if (expected != sum) { throw new AssertionError("[" + layout + "][" + name + "] expected [" + expected + "] but was [" + sum + "]"); } @@ -225,16 +197,13 @@ private void setupIndex() throws IOException { directory = new ByteBuffersDirectory(); try (IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE))) { for (int i = 0; i < INDEX_SIZE; i++) { + String c = Character.toString('a' - ((i % 1000) % 26) + 26); iw.addDocument( List.of( new NumericDocValuesField("long", i), new NumericDocValuesField("int", i), new NumericDocValuesField("double", NumericUtils.doubleToSortableLong(i)), - new KeywordFieldMapper.KeywordField( - "keyword", - new BytesRef(Integer.toString(i)), - KeywordFieldMapper.Defaults.FIELD_TYPE - ) + new KeywordFieldMapper.KeywordField("keyword", new BytesRef(c + i % 1000), KeywordFieldMapper.Defaults.FIELD_TYPE) ) ); if (i % COMMIT_INTERVAL == 0) { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle index d6984e40a5ea1..e6bbaeb19e495 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.build-scan.gradle @@ -95,10 +95,10 @@ buildScan { // Disable async upload in CI to ensure scan upload completes before CI agent is terminated uploadInBackground = false - def branch = System.getenv('BUILDKITE_BRANCH') + def branch = System.getenv('BUILDKITE_PULL_REQUEST_BASE_BRANCH') ?: System.getenv('BUILDKITE_BRANCH') def repoMatcher = System.getenv('BUILDKITE_REPO') =~ /(https:\/\/github\.com\/|git@github\.com:)(\S+)\.git/ def repository = repoMatcher.matches() ? 
repoMatcher.group(2) : "" - def jobName = (System.getenv('BUILDKITE_LABEL') ?: '').replaceAll(/[^a-zA-Z0-9_\-]+/, '_').toLowerCase() + def jobName = (System.getenv('BUILDKITE_LABEL') ?: '').replaceAll(/[^a-zA-Z0-9_\-]+/, ' ').trim().replaceAll(' ', '_').toLowerCase() tag 'CI' link 'CI Build', buildKiteUrl @@ -110,6 +110,11 @@ buildScan { value 'Job Name', jobName tag jobName + if (branch) { + tag branch + value 'Git Branch', branch + } + // Add SCM information def prId = System.getenv('BUILDKITE_PULL_REQUEST') if (prId != 'false') { @@ -122,7 +127,6 @@ buildScan { } else { value 'Git Commit ID', BuildParams.gitRevision link 'Source', "https://github.com/${repository}/tree/${BuildParams.gitRevision}" - tag branch } buildScanPublished { scan -> diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index bb12bf95847b7..d71c893cdd20f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -17,8 +17,13 @@ import org.gradle.api.GradleException; import org.gradle.api.Project; import org.gradle.api.logging.LogLevel; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.tasks.TaskProvider; +import org.gradle.jvm.toolchain.JavaLanguageVersion; +import org.gradle.jvm.toolchain.JavaToolchainService; +import org.gradle.jvm.toolchain.JvmVendorSpec; import java.io.File; import java.io.IOException; @@ -26,8 +31,6 @@ import java.util.List; import java.util.Locale; -import static org.elasticsearch.gradle.internal.util.JavaUtil.getJavaHome; - /** * By registering bwc tasks via this extension we can support declaring custom bwc tasks from the build script * without relying on groovy closures and sharing common logic for tasks created by the BwcSetup plugin already. 
@@ -37,16 +40,22 @@ public class BwcSetupExtension { private static final String MINIMUM_COMPILER_VERSION_PATH = "src/main/resources/minimumCompilerVersion"; private static final Version BUILD_TOOL_MINIMUM_VERSION = Version.fromString("7.14.0"); private final Project project; + private final ObjectFactory objectFactory; + private final JavaToolchainService toolChainService; private final Provider unreleasedVersionInfo; private Provider checkoutDir; public BwcSetupExtension( Project project, + ObjectFactory objectFactory, + JavaToolchainService toolChainService, Provider unreleasedVersionInfo, Provider checkoutDir ) { this.project = project; + this.objectFactory = objectFactory; + this.toolChainService = toolChainService; this.unreleasedVersionInfo = unreleasedVersionInfo; this.checkoutDir = checkoutDir; } @@ -137,4 +146,14 @@ private static String readFromFile(File file) { throw new GradleException("Cannot read java properties file.", ioException); } } + + /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ + public String getJavaHome(final int version) { + Property value = objectFactory.property(JavaLanguageVersion.class).value(JavaLanguageVersion.of(version)); + return toolChainService.launcherFor(javaToolchainSpec -> { + javaToolchainSpec.getLanguageVersion().value(value); + javaToolchainSpec.getVendor().set(JvmVendorSpec.ORACLE); + }).get().getMetadata().getInstallationPath().getAsFile().getAbsolutePath(); + } + } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/GenerateProviderManifest.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/GenerateProviderManifest.java new file mode 100644 index 0000000000000..621210cd935b2 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/GenerateProviderManifest.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal; + +import org.elasticsearch.gradle.util.FileUtils; +import org.gradle.api.DefaultTask; +import org.gradle.api.file.ConfigurableFileCollection; +import org.gradle.api.file.RegularFileProperty; +import org.gradle.api.tasks.Classpath; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.OutputFile; +import org.gradle.api.tasks.TaskAction; + +import java.io.File; +import java.util.stream.Collectors; + +import javax.inject.Inject; + +abstract class GenerateProviderManifest extends DefaultTask { + + @Inject + public GenerateProviderManifest() {} + + @Classpath + @InputFiles + abstract public ConfigurableFileCollection getProviderImplClasspath(); + + @OutputFile + abstract RegularFileProperty getManifestFile(); + + @TaskAction + void generateManifest() { + File manifestFile = getManifestFile().get().getAsFile(); + manifestFile.getParentFile().mkdirs(); + FileUtils.write(manifestFile, generateManifestContent(), "UTF-8"); + } + + private String generateManifestContent() { + return getProviderImplClasspath().getFiles().stream().map(File::getName).sorted().collect(Collectors.joining("\n")); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java index 768cac92a3dbd..d51770ffd30ed 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java @@ -82,6 +82,8 @@ public void apply(Project project) { }); TaskProvider addRemoteTaskProvider = tasks.register("addRemote", addRemote -> { + String rootProjectName = project.getRootProject().getName(); + addRemote.dependsOn(findRemoteTaskProvider); addRemote.onlyIf("remote exists", task -> ((boolean) extraProperties.get("remoteExists")) == false); addRemote.doLast(new Action() { @@ -99,7 +101,7 @@ public void execute(Task task) { : null ) ) - .getOrElse("https://github.com/" + remoteRepo + "/" + project.getRootProject().getName()); + .getOrElse("https://github.com/" + remoteRepo + "/" + rootProjectName); spec.commandLine("git", "remote", "add", remoteRepo, remoteRepoUrl); }); } @@ -127,6 +129,7 @@ public void execute(Task task) { String projectPath = project.getPath(); TaskProvider checkoutBwcBranchTaskProvider = tasks.register("checkoutBwcBranch", checkoutBwcBranch -> { checkoutBwcBranch.dependsOn(fetchLatestTaskProvider); + ExtraPropertiesExtension taskExtensionsProperties = checkoutBwcBranch.getExtensions().getExtraProperties(); checkoutBwcBranch.doLast(new Action() { @Override public void execute(Task task) { @@ -136,9 +139,7 @@ public void execute(Task task) { .orElse(providerFactory.systemProperty("tests.bwc.refspec." + bwcBranch)) .orElse( providerFactory.provider( - () -> task.getExtensions().getExtraProperties().has("refspec") - ? task.getExtensions().getExtraProperties().get("refspec").toString() - : null + () -> taskExtensionsProperties.has("refspec") ? 
taskExtensionsProperties.get("refspec").toString() : null ) ) .getOrElse(remote.get() + "/" + bwcBranch); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java index c4bb331b7de0a..2468711561ae4 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionBwcSetupPlugin.java @@ -16,10 +16,13 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; +import org.gradle.api.model.ObjectFactory; +import org.gradle.api.plugins.JvmToolchainsPlugin; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.TaskProvider; +import org.gradle.jvm.toolchain.JavaToolchainService; import org.gradle.language.base.plugins.LifecycleBasePlugin; import java.io.File; @@ -44,16 +47,21 @@ */ public class InternalDistributionBwcSetupPlugin implements Plugin { + private final ObjectFactory objectFactory; private ProviderFactory providerFactory; + private JavaToolchainService toolChainService; @Inject - public InternalDistributionBwcSetupPlugin(ProviderFactory providerFactory) { + public InternalDistributionBwcSetupPlugin(ObjectFactory objectFactory, ProviderFactory providerFactory) { + this.objectFactory = objectFactory; this.providerFactory = providerFactory; } @Override public void apply(Project project) { project.getRootProject().getPluginManager().apply(GlobalBuildInfoPlugin.class); + project.getPlugins().apply(JvmToolchainsPlugin.class); + toolChainService = project.getExtensions().getByType(JavaToolchainService.class); BuildParams.getBwcVersions().forPreviousUnreleased((BwcVersions.UnreleasedVersionInfo unreleasedVersion) -> { configureBwcProject(project.project(unreleasedVersion.gradleProjectPath()), unreleasedVersion); }); @@ -63,7 +71,7 @@ private void configureBwcProject(Project project, BwcVersions.UnreleasedVersionI Provider versionInfoProvider = providerFactory.provider(() -> versionInfo); Provider checkoutDir = versionInfoProvider.map(info -> new File(project.getBuildDir(), "bwc/checkout-" + info.branch())); BwcSetupExtension bwcSetupExtension = project.getExtensions() - .create("bwcSetup", BwcSetupExtension.class, project, versionInfoProvider, checkoutDir); + .create("bwcSetup", BwcSetupExtension.class, project, objectFactory, toolChainService, versionInfoProvider, checkoutDir); BwcGitExtension gitExtension = project.getPlugins().apply(InternalBwcGitPlugin.class).getGitExtension(); Provider bwcVersion = versionInfoProvider.map(info -> info.version()); gitExtension.setBwcVersion(versionInfoProvider.map(info -> info.version())); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java index c87d7a6b0e730..f6d3787a4f686 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -29,7 +29,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { public 
TaskProvider createTask(Project project) { project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); - project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.5.1"); + project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.6"); Configuration jdkJarHellConfig = project.getConfigurations().create(JDK_JAR_HELL_CONFIG_NAME); if (project.getPath().equals(LIBS_ELASTICSEARCH_CORE_PROJECT_PATH) == false) { // Internal projects are not all plugins, so make sure the check is available diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index ada263853c07d..bcbe1740630ce 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -16,16 +16,10 @@ import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.InternalDistributionDownloadPlugin; -import org.elasticsearch.gradle.internal.Jdk; import org.elasticsearch.gradle.internal.JdkDownloadPlugin; -import org.elasticsearch.gradle.internal.conventions.GUtils; -import org.elasticsearch.gradle.internal.conventions.util.Util; import org.elasticsearch.gradle.internal.docker.DockerSupportPlugin; import org.elasticsearch.gradle.internal.docker.DockerSupportService; import org.elasticsearch.gradle.internal.info.BuildParams; -import org.elasticsearch.gradle.internal.vagrant.VagrantBasePlugin; -import org.elasticsearch.gradle.internal.vagrant.VagrantExtension; -import org.elasticsearch.gradle.internal.vagrant.VagrantMachine; import org.elasticsearch.gradle.test.SystemPropertyCommandLineArgumentProvider; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Action; @@ -36,27 +30,19 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.provider.Provider; import org.gradle.api.specs.Specs; -import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.testing.Test; -import org.gradle.initialization.layout.BuildLayout; -import java.io.File; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; -import java.util.Locale; import java.util.Map; import java.util.function.Supplier; -import java.util.stream.Stream; - -import javax.inject.Inject; import static org.elasticsearch.gradle.distribution.ElasticsearchDistributionTypes.ARCHIVE; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.ALL_INTERNAL; @@ -67,32 +53,18 @@ import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_IRONBANK; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER_UBI; import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.RPM; -import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertLinuxPath; -import static 
org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertWindowsPath; /** * This class defines gradle tasks for testing our various distribution artifacts. */ public class DistroTestPlugin implements Plugin { - private static final String SYSTEM_JDK_VERSION = "17.0.2+8"; - private static final String SYSTEM_JDK_VENDOR = "adoptium"; - private static final String GRADLE_JDK_VERSION = "16.0.2+7"; - private static final String GRADLE_JDK_VENDOR = "adoptium"; // all distributions used by distro tests. this is temporary until tests are per distribution private static final String EXAMPLE_PLUGIN_CONFIGURATION = "examplePlugin"; - private static final String IN_VM_SYSPROP = "tests.inVM"; private static final String DISTRIBUTION_SYSPROP = "tests.distribution"; private static final String BWC_DISTRIBUTION_SYSPROP = "tests.bwc-distribution"; private static final String EXAMPLE_PLUGIN_SYSPROP = "tests.example-plugin"; - private final File rootDir; - - @Inject - public DistroTestPlugin(BuildLayout buildLayout) { - this.rootDir = buildLayout.getRootDirectory(); - } - @Override public void apply(Project project) { project.getRootProject().getPluginManager().apply(DockerSupportPlugin.class); @@ -118,16 +90,9 @@ public void apply(Project project) { List> windowsTestTasks = new ArrayList<>(); Map>> linuxTestTasks = new HashMap<>(); - Map>> upgradeTestTasks = new HashMap<>(); - Map> depsTasks = new HashMap<>(); for (ElasticsearchDistribution distribution : testDistributions) { String taskname = destructiveDistroTestTaskName(distribution); - TaskProvider depsTask = project.getTasks().register(taskname + "#deps"); - // explicitly depend on the archive not on the implicit extracted distribution - depsTask.configure(t -> t.dependsOn(distribution.getArchiveDependencies())); - depsTask.configure(t -> t.dependsOn(examplePlugin.getDependencies())); - depsTasks.put(taskname, depsTask); TaskProvider destructiveTask = configureTestTask(project, taskname, distribution, t -> { t.onlyIf( "Docker is not available", @@ -136,7 +101,7 @@ public void apply(Project project) { addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath); addDistributionSysprop(t, EXAMPLE_PLUGIN_SYSPROP, () -> examplePlugin.getSingleFile().toString()); t.exclude("**/PackageUpgradeTests.class"); - }, depsTask); + }, distribution.getArchiveDependencies(), examplePlugin.getDependencies()); if (distribution.getPlatform() == Platform.WINDOWS) { windowsTestTasks.add(destructiveTask); @@ -164,82 +129,15 @@ public void apply(Project project) { } String upgradeTaskname = destructiveDistroUpgradeTestTaskName(distribution, version.toString()); - TaskProvider upgradeDepsTask = project.getTasks().register(upgradeTaskname + "#deps"); - upgradeDepsTask.configure(t -> t.dependsOn(distribution, bwcDistro)); - depsTasks.put(upgradeTaskname, upgradeDepsTask); TaskProvider upgradeTest = configureTestTask(project, upgradeTaskname, distribution, t -> { addDistributionSysprop(t, DISTRIBUTION_SYSPROP, distribution::getFilepath); addDistributionSysprop(t, BWC_DISTRIBUTION_SYSPROP, bwcDistro::getFilepath); t.include("**/PackageUpgradeTests.class"); - }, upgradeDepsTask); + }, distribution, bwcDistro); versionTasks.get(version.toString()).configure(t -> t.dependsOn(upgradeTest)); - upgradeTestTasks.computeIfAbsent(version.toString(), k -> new ArrayList<>()).add(upgradeTest); } } } - - // setup jdks used by no-jdk tests, and by gradle executing - TaskProvider linuxGradleJdk = createJdk(project, "gradle", GRADLE_JDK_VENDOR, GRADLE_JDK_VERSION, "linux", 
"x64"); - TaskProvider linuxSystemJdk = createJdk(project, "system", SYSTEM_JDK_VENDOR, SYSTEM_JDK_VERSION, "linux", "x64"); - TaskProvider windowsGradleJdk = createJdk(project, "gradle", GRADLE_JDK_VENDOR, GRADLE_JDK_VERSION, "windows", "x64"); - TaskProvider windowsSystemJdk = createJdk(project, "system", SYSTEM_JDK_VENDOR, SYSTEM_JDK_VERSION, "windows", "x64"); - - project.subprojects(vmProject -> { - vmProject.getPluginManager().apply(VagrantBasePlugin.class); - TaskProvider gradleJdk = isWindows(vmProject) ? windowsGradleJdk : linuxGradleJdk; - TaskProvider systemJdk = isWindows(vmProject) ? windowsSystemJdk : linuxSystemJdk; - configureVM(vmProject, rootDir, gradleJdk, systemJdk); - List vmDependencies = Arrays.asList( - gradleJdk, - systemJdk, - project.getConfigurations().getByName("testRuntimeClasspath") - ); - - Map> vmLifecyleTasks = lifecycleTasks(vmProject, "distroTest"); - Map> vmVersionTasks = versionTasks(vmProject, "distroUpgradeTest"); - TaskProvider distroTest = vmProject.getTasks().register("distroTest"); - - // windows boxes get windows distributions, and linux boxes get linux distributions - if (isWindows(vmProject)) { - configureVMWrapperTasks(vmProject, windowsTestTasks, depsTasks, wrapperTask -> { - vmLifecyleTasks.get(ARCHIVE).configure(t -> t.dependsOn(wrapperTask)); - }, vmDependencies); - } else { - for (var entry : linuxTestTasks.entrySet()) { - ElasticsearchDistributionType type = entry.getKey(); - TaskProvider vmLifecycleTask = vmLifecyleTasks.get(type); - configureVMWrapperTasks(vmProject, entry.getValue(), depsTasks, wrapperTask -> { - vmLifecycleTask.configure(t -> t.dependsOn(wrapperTask)); - - // Only VM sub-projects that are specifically opted-in to testing Docker should - // have the Docker task added as a dependency. Although we control whether Docker - // is installed in the VM via `Vagrantfile` and we could auto-detect its presence - // in the VM, the test tasks e.g. `destructiveDistroTest.default-docker` are defined - // on the host during Gradle's configuration phase and not in the VM, so - // auto-detection doesn't work. 
- // - // The shouldTestDocker property could be null, hence we use Boolean.TRUE.equals() - boolean shouldExecute = (type.isDocker()) || Boolean.TRUE.equals(vmProject.findProperty("shouldTestDocker")); - - if (shouldExecute) { - distroTest.configure(t -> t.dependsOn(wrapperTask)); - } - }, vmDependencies); - } - - for (var entry : upgradeTestTasks.entrySet()) { - String version = entry.getKey(); - TaskProvider vmVersionTask = vmVersionTasks.get(version); - configureVMWrapperTasks( - vmProject, - entry.getValue(), - depsTasks, - wrapperTask -> { vmVersionTask.configure(t -> t.dependsOn(wrapperTask)); }, - vmDependencies - ); - } - } - }); } private static Map> lifecycleTasks(Project project, String taskPrefix) { @@ -252,7 +150,6 @@ private static Map> lifecycleTask lifecyleTasks.put(ARCHIVE, project.getTasks().register(taskPrefix + ".archives")); lifecyleTasks.put(DEB, project.getTasks().register(taskPrefix + ".packages")); lifecyleTasks.put(RPM, lifecyleTasks.get(DEB)); - return lifecyleTasks; } @@ -266,67 +163,6 @@ private static Map> versionTasks(Project project, String return versionTasks; } - private static TaskProvider createJdk( - Project project, - String purpose, - String vendor, - String version, - String platform, - String architecture - ) { - Jdk jdk = JdkDownloadPlugin.getContainer(project).create(platform + "-" + purpose); - jdk.setVendor(vendor); - jdk.setVersion(version); - jdk.setPlatform(platform); - jdk.setArchitecture(architecture); - - String taskname = "copy" + GUtils.capitalize(platform) + GUtils.capitalize(purpose) + "Jdk"; - TaskProvider copyTask = project.getTasks().register(taskname, Copy.class); - copyTask.configure(t -> { - t.from(jdk); - t.into(new File(project.getBuildDir(), "jdks/" + platform + "-" + architecture + "-" + vendor + "-" + version)); - }); - return copyTask; - } - - private static void configureVM( - Project project, - File rootDir, - TaskProvider gradleJdkProvider, - TaskProvider systemJdkProvider - ) { - String box = project.getName(); - - // setup VM used by these tests - VagrantExtension vagrant = project.getExtensions().getByType(VagrantExtension.class); - vagrant.setBox(box); - - vagrant.vmEnv("SYSTEM_JAVA_HOME", convertPath(rootDir, vagrant, systemJdkProvider, "", "")); - // set java home for gradle to use. 
package tests will overwrite/remove this for each test case - vagrant.vmEnv("JAVA_HOME", convertPath(rootDir, vagrant, gradleJdkProvider, "", "")); - if (System.getenv("JENKINS_URL") != null) { - Stream.of("JOB_NAME", "JENKINS_URL", "BUILD_NUMBER", "BUILD_URL").forEach(name -> vagrant.vmEnv(name, System.getenv(name))); - } - vagrant.setIsWindowsVM(isWindows(project)); - } - - private static Object convertPath( - File rootDirectory, - VagrantExtension vagrant, - TaskProvider jdkProvider, - String additionaLinux, - String additionalWindows - ) { - return Util.toStringable(() -> { - String hostPath = jdkProvider.get().getDestinationDir().toString(); - if (vagrant.isWindowsVM()) { - return convertWindowsPath(rootDirectory, hostPath) + additionalWindows; - } else { - return convertLinuxPath(rootDirectory, hostPath) + additionaLinux; - } - }); - } - private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); examplePlugin.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); @@ -335,32 +171,6 @@ private static Configuration configureExamplePlugin(Project project) { return examplePlugin; } - private static void configureVMWrapperTasks( - Project project, - List> destructiveTasks, - Map> depsTasks, - Action> configure, - Object... additionalDeps - ) { - for (TaskProvider destructiveTask : destructiveTasks) { - String destructiveTaskName = destructiveTask.getName(); - String taskname = destructiveTaskName.substring("destructive".length()); - taskname = taskname.substring(0, 1).toLowerCase(Locale.ROOT) + taskname.substring(1); - TaskProvider vmTask = project.getTasks().register(taskname, GradleDistroTestTask.class, t -> { - t.setGroup(JavaBasePlugin.VERIFICATION_GROUP); - t.setDescription("Runs " + destructiveTaskName.split("\\.", 2)[1] + " tests within vagrant"); - t.setTaskName(destructiveTaskName); - t.extraArg("-D'" + IN_VM_SYSPROP + "'"); - t.dependsOn(depsTasks.get(destructiveTaskName)); - t.dependsOn(additionalDeps); - t.setLogLevel(project.getGradle().getStartParameter().getLogLevel().toString()); - t.setExtension(project.getExtensions().findByType(VagrantExtension.class)); - t.setService(project.getExtensions().getByType(VagrantMachine.class)); - }); - configure.execute(vmTask); - } - } - private static TaskProvider configureTestTask( Project project, String taskname, @@ -377,9 +187,7 @@ private static TaskProvider configureTestTask( t.setClasspath(testSourceSet.getRuntimeClasspath()); t.setTestClassesDirs(testSourceSet.getOutput().getClassesDirs()); t.setWorkingDir(project.getProjectDir()); - if (System.getProperty(IN_VM_SYSPROP) == null) { - t.dependsOn(deps); - } + t.dependsOn(deps); configure.execute(t); }); } @@ -438,11 +246,6 @@ private static ElasticsearchDistribution createDistro( return distro; } - // return true if the project is for a windows VM, false otherwise - private static boolean isWindows(Project project) { - return project.getName().contains("windows"); - } - private static String distroId(ElasticsearchDistributionType type, Platform platform, boolean bundledJdk, Architecture architecture) { return "default-" + (type == ARCHIVE ? 
platform + "-" : "") diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/GradleDistroTestTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/GradleDistroTestTask.java deleted file mode 100644 index bf81e6316e635..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/GradleDistroTestTask.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.test; - -import org.elasticsearch.gradle.internal.vagrant.VagrantShellTask; -import org.gradle.api.file.ProjectLayout; -import org.gradle.api.tasks.Input; -import org.gradle.api.tasks.options.Option; -import org.gradle.initialization.layout.BuildLayout; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import javax.inject.Inject; - -import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertLinuxPath; -import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertWindowsPath; - -/** - * Run a gradle task of the current build, within the configured vagrant VM. - */ -public class GradleDistroTestTask extends VagrantShellTask { - - private String taskName; - private String testClass; - - private List extraArgs = new ArrayList<>(); - - private final ProjectLayout projectLayout; - private final BuildLayout buildLayout; - - private String logLevel; - - @Inject - public GradleDistroTestTask(BuildLayout buildLayout, ProjectLayout projectLayout) { - super(buildLayout); - this.buildLayout = buildLayout; - this.projectLayout = projectLayout; - } - - public void setTaskName(String taskName) { - this.taskName = taskName; - } - - @Input - public String getTaskName() { - return taskName; - } - - @Option(option = "tests", description = "Sets test class or method name to be included, '*' is supported.") - public void setTestClass(String testClass) { - this.testClass = testClass; - } - - @Input - public List getExtraArgs() { - return extraArgs; - } - - public void extraArg(String arg) { - this.extraArgs.add(arg); - } - - public void setLogLevel(String logLevel) { - this.logLevel = logLevel; - } - - @Override - protected List getWindowsScript() { - return getScript(true); - } - - @Override - protected List getLinuxScript() { - return getScript(false); - } - - private List getScript(boolean isWindows) { - String cacheDir = projectLayout.getBuildDirectory().dir("gradle-cache").get().getAsFile().getAbsolutePath(); - StringBuilder line = new StringBuilder(); - line.append(isWindows ? "& .\\gradlew " : "./gradlew "); - line.append(taskName); - line.append(" --project-cache-dir "); - line.append( - isWindows - ? 
convertWindowsPath(buildLayout.getRootDirectory(), cacheDir) - : convertLinuxPath(buildLayout.getRootDirectory(), cacheDir) - ); - line.append(" -S"); - line.append(" --parallel"); - line.append(" -D'org.gradle.logging.level'=" + logLevel); - if (testClass != null) { - line.append(" --tests="); - line.append(testClass); - } - extraArgs.stream().map(s -> " " + s).forEach(line::append); - return Collections.singletonList(line.toString()); - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/JavaUtil.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/JavaUtil.java deleted file mode 100644 index de9dffa35d3fd..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/util/JavaUtil.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.util; - -import org.elasticsearch.gradle.internal.info.BuildParams; -import org.elasticsearch.gradle.internal.info.JavaHome; -import org.gradle.api.GradleException; - -import java.util.List; -import java.util.Optional; - -public class JavaUtil { - - /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ - public static String getJavaHome(final int version) { - List javaHomes = BuildParams.getJavaVersions(); - Optional java = javaHomes.stream().filter(j -> j.getVersion() == version).findFirst(); - return java.orElseThrow(() -> new GradleException("JAVA" + version + "_HOME required")).getJavaHome().get().getAbsolutePath(); - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantBasePlugin.java deleted file mode 100644 index b7b4d20d51f56..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantBasePlugin.java +++ /dev/null @@ -1,149 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.gradle.internal.vagrant; - -import org.elasticsearch.gradle.ReaperPlugin; -import org.elasticsearch.gradle.internal.InternalReaperPlugin; -import org.elasticsearch.gradle.util.GradleUtils; -import org.gradle.api.Plugin; -import org.gradle.api.Project; -import org.gradle.api.Task; -import org.gradle.api.execution.TaskActionListener; -import org.gradle.api.execution.TaskExecutionListener; -import org.gradle.api.tasks.TaskState; - -import java.io.ByteArrayOutputStream; -import java.nio.charset.StandardCharsets; -import java.util.List; -import java.util.Locale; -import java.util.function.Consumer; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -public class VagrantBasePlugin implements Plugin { - - @Override - public void apply(Project project) { - project.getRootProject().getPluginManager().apply(VagrantSetupCheckerPlugin.class); - project.getRootProject().getPluginManager().apply(VagrantManagerPlugin.class); - project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class); - - var reaperServiceProvider = GradleUtils.getBuildService(project.getGradle().getSharedServices(), ReaperPlugin.REAPER_SERVICE_NAME); - var extension = project.getExtensions().create("vagrant", VagrantExtension.class, project); - var service = project.getExtensions().create("vagrantService", VagrantMachine.class, extension, reaperServiceProvider); - - project.getGradle() - .getTaskGraph() - .whenReady( - graph -> service.refs = graph.getAllTasks() - .stream() - .filter(t -> t instanceof VagrantShellTask) - .filter(t -> t.getProject() == project) - .count() - ); - } - - /** - * Check vagrant and virtualbox versions, if any vagrant test tasks will be run. - */ - static class VagrantSetupCheckerPlugin implements Plugin { - - private static final Pattern VAGRANT_VERSION = Pattern.compile("Vagrant (\\d+\\.\\d+\\.\\d+)"); - private static final Pattern VIRTUAL_BOX_VERSION = Pattern.compile("(\\d+\\.\\d+)"); - - @Override - public void apply(Project project) { - if (project != project.getRootProject()) { - throw new IllegalArgumentException("VagrantSetupCheckerPlugin can only be applied to the root project of a build"); - } - - project.getGradle().getTaskGraph().whenReady(graph -> { - boolean needsVagrant = graph.getAllTasks().stream().anyMatch(t -> t instanceof VagrantShellTask); - if (needsVagrant) { - checkVersion(project, "vagrant", VAGRANT_VERSION, 1, 8, 6); - checkVersion(project, "vboxmanage", VIRTUAL_BOX_VERSION, 5, 1); - } - }); - } - - void checkVersion(Project project, String tool, Pattern versionRegex, int... minVersion) { - ByteArrayOutputStream pipe = new ByteArrayOutputStream(); - project.exec(spec -> { - spec.setCommandLine(tool, "--version"); - spec.setStandardOutput(pipe); - }); - String output = pipe.toString(StandardCharsets.UTF_8).trim(); - Matcher matcher = versionRegex.matcher(output); - if (matcher.find() == false) { - throw new IllegalStateException( - tool + " version output [" + output + "] did not match regex [" + versionRegex.pattern() + "]" - ); - } - - String version = matcher.group(1); - List versionParts = Stream.of(version.split("\\.")).map(Integer::parseInt).toList(); - for (int i = 0; i < minVersion.length; ++i) { - int found = versionParts.get(i); - if (found > minVersion[i]) { - break; // most significant version is good - } else if (found < minVersion[i]) { - final String exceptionMessage = String.format( - Locale.ROOT, - "Unsupported version of %s. 
Found [%s], expected [%s+]", - tool, - version, - Stream.of(minVersion).map(String::valueOf).collect(Collectors.joining(".")) - ); - - throw new IllegalStateException(exceptionMessage); - } // else equal, so check next element - } - } - } - - /** - * Adds global hooks to manage destroying, starting and updating VMs. - */ - static class VagrantManagerPlugin implements Plugin, TaskActionListener, TaskExecutionListener { - - @Override - public void apply(Project project) { - if (project != project.getRootProject()) { - throw new IllegalArgumentException("VagrantManagerPlugin can only be applied to the root project of a build"); - } - project.getGradle().addListener(this); - } - - private void callIfVagrantTask(Task task, Consumer method) { - if (task instanceof VagrantShellTask) { - VagrantMachine service = task.getProject().getExtensions().getByType(VagrantMachine.class); - method.accept(service); - } - } - - @Override - public void beforeExecute(Task task) { /* nothing to do */} - - @Override - public void afterActions(Task task) { /* nothing to do */ } - - @Override - public void beforeActions(Task task) { - callIfVagrantTask(task, VagrantMachine::maybeStartVM); - } - - @Override - public void afterExecute(Task task, TaskState state) { - callIfVagrantTask(task, service -> service.maybeStopVM(state.getFailure() != null)); - } - } - -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantExtension.java deleted file mode 100644 index 67799e6a6e751..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantExtension.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.gradle.internal.vagrant; - -import org.gradle.api.Project; -import org.gradle.api.file.RegularFileProperty; -import org.gradle.api.provider.MapProperty; -import org.gradle.api.provider.Property; -import org.gradle.api.tasks.Input; - -import java.io.File; -import java.util.Map; - -public class VagrantExtension { - - private final Property box; - private final MapProperty hostEnv; - private final MapProperty vmEnv; - private final RegularFileProperty vagrantfile; - private boolean isWindowsVM; - - public VagrantExtension(Project project) { - this.box = project.getObjects().property(String.class); - this.hostEnv = project.getObjects().mapProperty(String.class, Object.class); - this.vmEnv = project.getObjects().mapProperty(String.class, Object.class); - this.vagrantfile = project.getObjects().fileProperty(); - this.vagrantfile.convention(project.getRootProject().getLayout().getProjectDirectory().file("Vagrantfile")); - this.isWindowsVM = false; - } - - @Input - public String getBox() { - return box.get(); - } - - public void setBox(String box) { - // TODO: should verify this against the Vagrantfile, but would need to do so in afterEvaluate once vagrantfile is unmodifiable - this.box.set(box); - } - - @Input - public Map getHostEnv() { - return hostEnv.get(); - } - - public void hostEnv(String name, Object value) { - hostEnv.put(name, value); - } - - @Input - public Map getVmEnv() { - return vmEnv.get(); - } - - public void vmEnv(String name, Object value) { - vmEnv.put(name, value); - } - - @Input - public boolean isWindowsVM() { - return isWindowsVM; - } - - public void setIsWindowsVM(boolean isWindowsVM) { - this.isWindowsVM = isWindowsVM; - } - - @Input - public File getVagrantfile() { - return this.vagrantfile.get().getAsFile(); - } - - public void setVagrantfile(File file) { - vagrantfile.set(file); - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java deleted file mode 100644 index 6cf614fd7eb3c..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantMachine.java +++ /dev/null @@ -1,202 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.vagrant; - -import org.apache.commons.io.output.TeeOutputStream; -import org.elasticsearch.gradle.LoggedExec; -import org.elasticsearch.gradle.ReaperService; -import org.elasticsearch.gradle.internal.LoggingOutputStream; -import org.elasticsearch.gradle.internal.conventions.util.Util; -import org.gradle.api.Action; -import org.gradle.api.provider.Provider; -import org.gradle.internal.logging.progress.ProgressLogger; -import org.gradle.internal.logging.progress.ProgressLoggerFactory; -import org.gradle.process.ExecOperations; - -import java.io.File; -import java.io.OutputStream; -import java.nio.file.Paths; -import java.util.Arrays; -import java.util.Objects; -import java.util.function.UnaryOperator; - -import javax.inject.Inject; - -/** - * An helper to manage a vagrant box. 
- * - * This is created alongside a {@link VagrantExtension} for a project to manage starting and - * stopping a single vagrant box. - */ -public class VagrantMachine { - - private final VagrantExtension extension; - private final Provider reaperServiceProvider; - private ReaperService reaper; - // pkg private so plugin can set this after construction - long refs; - private boolean isVMStarted = false; - - public VagrantMachine(VagrantExtension extension, Provider reaperServiceProvider) { - this.extension = extension; - this.reaperServiceProvider = reaperServiceProvider; - } - - @Inject - protected ProgressLoggerFactory getProgressLoggerFactory() { - throw new UnsupportedOperationException(); - } - - @Inject - protected ExecOperations getExecOperations() { - throw new UnsupportedOperationException(); - } - - public void execute(Action action) { - VagrantExecSpec vagrantSpec = new VagrantExecSpec(); - action.execute(vagrantSpec); - - Objects.requireNonNull(vagrantSpec.command); - - LoggedExec.exec(getExecOperations(), execSpec -> { - execSpec.setExecutable("vagrant"); - File vagrantfile = extension.getVagrantfile(); - execSpec.setEnvironment(System.getenv()); // pass through env - execSpec.environment("VAGRANT_CWD", vagrantfile.getParentFile().toString()); - execSpec.environment("VAGRANT_VAGRANTFILE", vagrantfile.getName()); - extension.getHostEnv().forEach(execSpec::environment); - - execSpec.args(vagrantSpec.command); - if (vagrantSpec.subcommand != null) { - execSpec.args(vagrantSpec.subcommand); - } - execSpec.args(extension.getBox()); - if (vagrantSpec.args != null) { - execSpec.args(Arrays.asList(vagrantSpec.args)); - } - - UnaryOperator progressHandler = vagrantSpec.progressHandler; - if (progressHandler == null) { - progressHandler = new VagrantProgressLogger("==> " + extension.getBox() + ": "); - } - OutputStream output = execSpec.getStandardOutput(); - // output from vagrant needs to be manually curated because --machine-readable isn't actually "readable" - OutputStream progressStream = new ProgressOutputStream(vagrantSpec.command, progressHandler); - execSpec.setStandardOutput(new TeeOutputStream(output, progressStream)); - }); - } - - // start the configuration VM if it hasn't been started yet - void maybeStartVM() { - if (isVMStarted) { - return; - } - execute(spec -> { - spec.setCommand("box"); - spec.setSubcommand("update"); - }); - - // Destroying before every execution can be annoying while iterating on tests locally. Therefore, we provide a flag that defaults - // to true that can be used to control whether or not to destroy any test boxes before test execution. - boolean destroyVM = Util.getBooleanProperty("vagrant.destroy", true); - if (destroyVM) { - execute(spec -> { - spec.setCommand("destroy"); - spec.setArgs("--force"); - }); - } - - // register box to be shutdown if gradle dies - reaper = reaperServiceProvider.get(); - reaper.registerCommand(extension.getBox(), "vagrant", "halt", "-f", extension.getBox()); - - // We lock the provider to virtualbox because the Vagrantfile specifies lots of boxes that only work - // properly in virtualbox. Virtualbox is vagrant's default but its possible to change that default and folks do. 
- execute(spec -> { - spec.setCommand("up"); - spec.setArgs("--provision", "--provider", "virtualbox"); - }); - isVMStarted = true; - } - - // stops the VM if refs are down to 0, or force was called - void maybeStopVM(boolean force) { - assert refs >= 1; - this.refs--; - if ((refs == 0 || force) && isVMStarted) { - execute(spec -> spec.setCommand("halt")); - reaper.unregister(extension.getBox()); - } - } - - public static String convertLinuxPath(File rootDir, String path) { - return "/elasticsearch/" + rootDir.toPath().relativize(Paths.get(path)); - } - - public static String convertWindowsPath(File rootDir, String path) { - return "C:\\elasticsearch\\" + rootDir.toPath().relativize(Paths.get(path)).toString().replace('/', '\\'); - } - - public static class VagrantExecSpec { - private String command; - private String subcommand; - private String[] args; - private UnaryOperator progressHandler; - - private VagrantExecSpec() {} - - public void setCommand(String command) { - this.command = command; - } - - public void setSubcommand(String subcommand) { - this.subcommand = subcommand; - } - - public void setArgs(String... args) { - this.args = args; - } - - /** - * A function to translate output from the vagrant command execution to the progress line. - * - * The function takes the current line of output from vagrant, and returns a new - * progress line, or {@code null} if there is no update. - */ - public void setProgressHandler(UnaryOperator progressHandler) { - this.progressHandler = progressHandler; - } - } - - private class ProgressOutputStream extends LoggingOutputStream { - - private ProgressLogger progressLogger; - private UnaryOperator progressHandler; - - ProgressOutputStream(String command, UnaryOperator progressHandler) { - this.progressHandler = progressHandler; - this.progressLogger = getProgressLoggerFactory().newOperation("vagrant"); - progressLogger.start(extension.getBox() + "> " + command, "hello"); - } - - @Override - protected void logLine(String line) { - String progress = progressHandler.apply(line); - if (progress != null) { - progressLogger.progress(progress); - } - } - - @Override - public void close() { - progressLogger.completed(); - } - } - -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantProgressLogger.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantProgressLogger.java deleted file mode 100644 index a204bbc668db8..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantProgressLogger.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.gradle.internal.vagrant; - -import java.util.function.UnaryOperator; - -public class VagrantProgressLogger implements UnaryOperator { - - private static final String HEADING_PREFIX = "==> "; - - private final String squashedPrefix; - private String lastLine = ""; - private String heading = ""; - private boolean inProgressReport = false; - - public VagrantProgressLogger(String squashedPrefix) { - this.squashedPrefix = squashedPrefix; - } - - @Override - public String apply(String line) { - if (line.startsWith("\r\u001b")) { - /* We don't want to try to be a full terminal emulator but we want to - keep the escape sequences from leaking and catch _some_ of the - meaning. */ - line = line.substring(2); - if ("[K".equals(line)) { - inProgressReport = true; - } - return null; - } - if (line.startsWith(squashedPrefix)) { - line = line.substring(squashedPrefix.length()); - inProgressReport = false; - lastLine = line; - if (line.startsWith(HEADING_PREFIX)) { - line = line.substring(HEADING_PREFIX.length()); - heading = line + " > "; - } else { - line = heading + line; - } - } else if (inProgressReport) { - inProgressReport = false; - line = lastLine + line; - } else { - return null; - } - return line; - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantShellTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantShellTask.java deleted file mode 100644 index 8c2bd05a95938..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/vagrant/VagrantShellTask.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.gradle.internal.vagrant; - -import org.gradle.api.DefaultTask; -import org.gradle.api.tasks.Input; -import org.gradle.api.tasks.TaskAction; -import org.gradle.initialization.layout.BuildLayout; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.function.UnaryOperator; - -import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertLinuxPath; -import static org.elasticsearch.gradle.internal.vagrant.VagrantMachine.convertWindowsPath; - -/** - * A shell script to run within a vagrant VM. - * - * The script is run as root within the VM. 
- */ -public abstract class VagrantShellTask extends DefaultTask { - - private VagrantExtension extension; - private VagrantMachine service; - - private UnaryOperator progressHandler = UnaryOperator.identity(); - private BuildLayout buildLayout; - - public VagrantShellTask(BuildLayout buildLayout) { - this.buildLayout = buildLayout; - } - - @Input - protected abstract List getWindowsScript(); - - @Input - protected abstract List getLinuxScript(); - - @Input - public UnaryOperator getProgressHandler() { - return progressHandler; - } - - public void setProgressHandler(UnaryOperator progressHandler) { - this.progressHandler = progressHandler; - } - - public void setExtension(VagrantExtension extension) { - this.extension = extension; - } - - public void setService(VagrantMachine service) { - this.service = service; - } - - @TaskAction - public void runScript() { - if (extension.isWindowsVM()) { - service.execute(spec -> { - spec.setCommand("winrm"); - List script = new ArrayList<>(); - script.add("try {"); - script.add("cd " + convertWindowsPath(buildLayout.getRootDirectory(), buildLayout.getRootDirectory().toString())); - extension.getVmEnv().forEach((k, v) -> script.add("$Env:" + k + " = \"" + v + "\"")); - script.addAll(getWindowsScript().stream().map(s -> " " + s).toList()); - script.addAll( - Arrays.asList( - " exit $LASTEXITCODE", - "} catch {", - // catch if we have a failure to even run the script at all above, equivalent to set -e, sort of - " echo $_.Exception.Message", - " exit 1", - "}" - ) - ); - spec.setArgs("--elevated", "--command", String.join("\n", script)); - spec.setProgressHandler(progressHandler); - }); - } else { - try { - service.execute(spec -> { - spec.setCommand("ssh"); - - List script = new ArrayList<>(); - script.add("sudo bash -c '"); // start inline bash script - script.add("pwd"); - script.add("cd " + convertLinuxPath(buildLayout.getRootDirectory(), buildLayout.getRootDirectory().toString())); - extension.getVmEnv().forEach((k, v) -> script.add("export " + k + "=" + v)); - script.addAll(getLinuxScript()); - script.add("'"); // end inline bash script - spec.setArgs("--command", String.join("\n", script)); - spec.setProgressHandler(progressHandler); - }); - } catch (Exception e) { - /*getLogger().error("Failed command, dumping dmesg", e); - service.execute(spec -> { - spec.setCommand("ssh"); - spec.setArgs("--command", "dmesg"); - spec.setProgressHandler(line -> { - getLogger().error(line); - return null; - }); - });*/ - throw e; - } - } - } - -} diff --git a/build-tools-internal/src/main/resources/changelog-schema.json b/build-tools-internal/src/main/resources/changelog-schema.json index 644c543222274..87a9313f3eefe 100644 --- a/build-tools-internal/src/main/resources/changelog-schema.json +++ b/build-tools-internal/src/main/resources/changelog-schema.json @@ -75,6 +75,7 @@ "Ranking", "Recovery", "Reindex", + "Rollup", "SQL", "Search", "Security", @@ -277,6 +278,7 @@ "Packaging", "Painless", "REST API", + "Rollup", "System requirement", "Transform" ] diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 791823b722a36..d19500c3c332d 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -155,3 +155,8 @@ org.elasticsearch.cluster.ClusterStateTaskExecutor$TaskContext#success(java.util @defaultMessage ClusterState#compatibilityVersions 
are for internal use only. Use ClusterState#getMinVersions or a different version. See TransportVersion javadocs for more info. org.elasticsearch.cluster.ClusterState#compatibilityVersions() + +@defaultMessage ClusterFeatures#nodeFeatures is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster. +org.elasticsearch.cluster.ClusterFeatures#nodeFeatures() +@defaultMessage ClusterFeatures#clusterHasFeature is for internal use only. Use FeatureService#clusterHasFeature to determine if a feature is present on the cluster. +org.elasticsearch.cluster.ClusterFeatures#clusterHasFeature(org.elasticsearch.features.NodeFeature) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 5f2e9feca975e..dc43523b747b3 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -2,8 +2,7 @@ elasticsearch = 8.12.0 lucene = 9.8.0 bundled_jdk_vendor = openjdk -bundled_jdk = 21+35@fd2272bbf8e04c3dbaee13770090416c - +bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d # optional dependencies spatial4j = 0.7 jts = 1.15.0 diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/MockApmServer.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/MockApmServer.java new file mode 100644 index 0000000000000..7ec74ee19d1bb --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/MockApmServer.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.testclusters; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; + +/** + * This is a server which just accepts lines of JSON code and if the JSON + * is valid and the root node is "transaction", then adds that JSON object + * to a transaction list which is accessible externally to the class. + * + * The Elastic agent sends lines of JSON code, and so this mock server + * can be used as a basic APM server for testing. + * + * The HTTP server used is the JDK embedded com.sun.net.httpserver + */ +public class MockApmServer { + private static final Logger logger = Logging.getLogger(MockApmServer.class); + private int port; + + public MockApmServer(int port) { + this.port = port; + } + + /** + * Simple main that starts a mock APM server and prints the port it is + * running on. This is not needed + * for testing; it is just a convenient template for trying things out + * if you want to play around. + */ + public static void main(String[] args) throws IOException, InterruptedException { + MockApmServer server = new MockApmServer(9999); + server.start(); + } + + private static volatile HttpServer instance; + + /** + * Start the Mock APM server.
Just returns empty JSON structures for every incoming message + * @return - the port the Mock APM server started on + * @throws IOException + */ + public synchronized int start() throws IOException { + if (instance != null) { + throw new IOException("MockApmServer: Ooops, you can't start this instance more than once"); + } + InetSocketAddress addr = new InetSocketAddress("0.0.0.0", port); + HttpServer server = HttpServer.create(addr, 10); + server.createContext("/exit", new ExitHandler()); + server.createContext("/", new RootHandler()); + + server.start(); + instance = server; + logger.lifecycle("MockApmServer started on port " + server.getAddress().getPort()); + return server.getAddress().getPort(); + } + + public int getPort() { + return port; + } + + /** + * Stop the server gracefully if possible + */ + public synchronized void stop() { + logger.lifecycle("stopping apm server"); + instance.stop(1); + instance = null; + } + + class RootHandler implements HttpHandler { + public void handle(HttpExchange t) { + try { + InputStream body = t.getRequestBody(); + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + byte[] buffer = new byte[8 * 1024]; + int lengthRead; + while ((lengthRead = body.read(buffer)) > 0) { + bytes.write(buffer, 0, lengthRead); + } + logger.lifecycle(("MockApmServer reading JSON objects: " + bytes.toString())); + + String response = "{}"; + t.sendResponseHeaders(200, response.length()); + OutputStream os = t.getResponseBody(); + os.write(response.getBytes()); + os.close(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + static class ExitHandler implements HttpHandler { + private static final int STOP_TIME = 3; + + public void handle(HttpExchange t) { + try { + InputStream body = t.getRequestBody(); + String response = "{}"; + t.sendResponseHeaders(200, response.length()); + OutputStream os = t.getResponseBody(); + os.write(response.getBytes()); + os.close(); + instance.stop(STOP_TIME); + instance = null; + } catch (Exception e) { + e.printStackTrace(); + } + } + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java index d0b2581895db7..953c0447ec71b 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/RunTask.java @@ -31,7 +31,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -public class RunTask extends DefaultTestClustersTask { +public abstract class RunTask extends DefaultTestClustersTask { public static final String CUSTOM_SETTINGS_PREFIX = "tests.es."; private static final Logger logger = Logging.getLogger(RunTask.class); @@ -40,6 +40,7 @@ public class RunTask extends DefaultTestClustersTask { private static final String transportCertificate = "private-cert2.p12"; private Boolean debug = false; + private Boolean apmServerEnabled = false; private Boolean preserveData = false; @@ -54,6 +55,7 @@ public class RunTask extends DefaultTestClustersTask { private final Path tlsBasePath = Path.of( new File(getProject().getRootDir(), "build-tools-internal/src/main/resources/run.ssl").toURI() ); + private MockApmServer mockServer; @Option(option = "debug-jvm", description = "Enable debugging configuration, to allow attaching a debugger to elasticsearch.") public void setDebug(boolean enabled) { @@ -65,6 +67,16 @@ public Boolean getDebug() { return debug; } + @Input + public Boolean 
getApmServerEnabled() { + return apmServerEnabled; + } + + @Option(option = "with-apm-server", description = "Run simple logging http server to accept apm requests") + public void setApmServerEnabled(Boolean apmServerEnabled) { + this.apmServerEnabled = apmServerEnabled; + } + @Option(option = "data-dir", description = "Override the base data directory used by the testcluster") public void setDataDir(String dataDirStr) { dataDir = Paths.get(dataDirStr).toAbsolutePath(); @@ -172,6 +184,19 @@ public void beforeStart() { node.setting("xpack.security.transport.ssl.keystore.path", "transport.keystore"); node.setting("xpack.security.transport.ssl.certificate_authorities", "transport.ca"); } + + if (apmServerEnabled) { + mockServer = new MockApmServer(9999); + try { + mockServer.start(); + node.setting("telemetry.metrics.enabled", "true"); + node.setting("tracing.apm.agent.server_url", "http://127.0.0.1:" + mockServer.getPort()); + } catch (IOException e) { + logger.warn("Unable to start APM server", e); + } + + } + } } if (debug) { @@ -242,6 +267,10 @@ public void runAndWait() throws IOException { if (thrown != null) { logger.debug("exception occurred during close of stdout file readers", thrown); } + + if (apmServerEnabled && mockServer != null) { + mockServer.stop(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 586a9485ab834..72a462c3cd8c9 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -16,8 +16,6 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Task; -import org.gradle.api.execution.TaskActionListener; -import org.gradle.api.execution.TaskExecutionListener; import org.gradle.api.file.ArchiveOperations; import org.gradle.api.file.FileSystemOperations; import org.gradle.api.internal.file.FileOperations; @@ -26,11 +24,19 @@ import org.gradle.api.logging.Logging; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; -import org.gradle.api.tasks.TaskState; +import org.gradle.api.services.BuildService; +import org.gradle.api.services.BuildServiceParameters; +import org.gradle.build.event.BuildEventsListenerRegistry; import org.gradle.internal.jvm.Jvm; import org.gradle.process.ExecOperations; +import org.gradle.tooling.events.FinishEvent; +import org.gradle.tooling.events.OperationCompletionListener; +import org.gradle.tooling.events.task.TaskFailureResult; +import org.gradle.tooling.events.task.TaskFinishEvent; import java.io.File; +import java.util.HashMap; +import java.util.Map; import java.util.function.Function; import javax.inject.Inject; @@ -153,17 +159,26 @@ private void createListClustersTask(Project project, NamedDomainObjectContainer< } - static class TestClustersHookPlugin implements Plugin<Project> { - @Override + static abstract class TestClustersHookPlugin implements Plugin<Project> { + @Inject + public abstract BuildEventsListenerRegistry getEventsListenerRegistry(); + + @Inject + public TestClustersHookPlugin() {} + public void apply(Project project) { if (project != project.getRootProject()) { throw new IllegalStateException(this.getClass().getName() + " can only be applied to the root project."); } - Provider<TestClustersRegistry> registryProvider = GradleUtils.getBuildService( project.getGradle().getSharedServices(), REGISTRY_SERVICE_NAME );
+ + Provider<TaskEventsService> testClusterTasksService = project.getGradle() + .getSharedServices() + .registerIfAbsent("testClusterTasksService", TaskEventsService.class, spec -> {}); + + TestClustersRegistry registry = registryProvider.get(); // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters @@ -173,10 +188,10 @@ public void apply(Project project) { configureClaimClustersHook(project.getGradle(), registry); // Before each task, we determine if a cluster needs to be started for that task. - configureStartClustersHook(project.getGradle(), registry); + configureStartClustersHook(project.getGradle(), registry, testClusterTasksService); // After each task we determine if there are clusters that are no longer needed. - configureStopClustersHook(project.getGradle(), registry); + getEventsListenerRegistry().onTaskCompletion(testClusterTasksService); } private static void configureClaimClustersHook(Gradle gradle, TestClustersRegistry registry) { @@ -192,39 +207,57 @@ private static void configureClaimClustersHook(Gradle gradle, TestClustersRegist }); } - private static void configureStartClustersHook(Gradle gradle, TestClustersRegistry registry) { - gradle.addListener(new TaskActionListener() { - @Override - public void beforeActions(Task task) { - if (task instanceof TestClustersAware == false) { - return; - } - // we only start the cluster before the actions, so we'll not start it if the task is up-to-date - TestClustersAware awareTask = (TestClustersAware) task; - awareTask.beforeStart(); - awareTask.getClusters().forEach(registry::maybeStartCluster); - } - - @Override - public void afterActions(Task task) {} + private void configureStartClustersHook( + Gradle gradle, + TestClustersRegistry registry, + Provider<TaskEventsService> testClusterTasksService + ) { + testClusterTasksService.get().registry(registry); + gradle.getTaskGraph().whenReady(taskExecutionGraph -> { + taskExecutionGraph.getAllTasks() + .stream() + .filter(task -> task instanceof TestClustersAware) + .map(task -> (TestClustersAware) task) + .forEach(awareTask -> { + testClusterTasksService.get().register(awareTask.getPath(), awareTask); + awareTask.doFirst(task -> { + awareTask.beforeStart(); + awareTask.getClusters().forEach(registry::maybeStartCluster); + }); + }); }); } + } + + static public abstract class TaskEventsService implements BuildService<BuildServiceParameters.None>, OperationCompletionListener { + + Map<String, TestClustersAware> tasksMap = new HashMap<>(); + private TestClustersRegistry registryProvider; + + public void register(String path, TestClustersAware task) { + tasksMap.put(path, task); + } - private static void configureStopClustersHook(Gradle gradle, TestClustersRegistry registry) { - gradle.addListener(new TaskExecutionListener() { - @Override - public void afterExecute(Task task, TaskState state) { - if (task instanceof TestClustersAware == false) { - return; + public void registry(TestClustersRegistry registry) { + registryProvider = registry; + } + + @Override + public void onFinish(FinishEvent finishEvent) { + if (finishEvent instanceof TaskFinishEvent taskFinishEvent) { + // Handle task finish event... + String taskPath = taskFinishEvent.getDescriptor().getTaskPath(); + if (tasksMap.containsKey(taskPath)) { + TestClustersAware task = tasksMap.get(taskPath); + // unclaim the cluster if the task has been executed and the cluster has been claimed in the doFirst block.
+ if (task.getDidWork()) { + task.getClusters() + .forEach( + cluster -> registryProvider.stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) + ); } - // always unclaim the cluster, even if _this_ task is up-to-date, as others might not have been - // and caused the cluster to start. - ((TestClustersAware) task).getClusters().forEach(cluster -> registry.stopCluster(cluster, state.getFailure() != null)); } - - @Override - public void beforeExecute(Task task) {} - }); + } } } } diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index 7b7040dfd7098..96e577d5635ab 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -407,7 +407,7 @@ void addBuildDockerImageTask(Architecture architecture, DockerBase base) { if (base == DockerBase.IRON_BANK) { Map buildArgsMap = [ 'BASE_REGISTRY': 'docker.elastic.co', - 'BASE_IMAGE' : 'ubi8/ubi', + 'BASE_IMAGE' : 'ubi9/ubi', 'BASE_TAG' : 'latest' ] diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index a6ecfdaf417e1..b62fa983dd480 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -21,8 +21,8 @@ <% if (docker_base == 'iron_bank') { %> ARG BASE_REGISTRY=registry1.dso.mil -ARG BASE_IMAGE=ironbank/redhat/ubi/ubi8 -ARG BASE_TAG=8.6 +ARG BASE_IMAGE=redhat/ubi/ubi9 +ARG BASE_TAG=9.2 <% } %> ################################################################################ diff --git a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml index 9fce16efad328..7152f6d18f1d2 100644 --- a/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml +++ b/distribution/docker/src/docker/iron_bank/hardening_manifest.yaml @@ -13,8 +13,8 @@ tags: # Build args passed to Dockerfile ARGs args: - BASE_IMAGE: "redhat/ubi/ubi8" - BASE_TAG: "8.6" + BASE_IMAGE: "redhat/ubi/ubi9" + BASE_TAG: "9.2" # Docker image labels labels: diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java index 28a086cf6f8f7..b6cd680cb5816 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/APMJvmOptions.java @@ -45,12 +45,6 @@ class APMJvmOptions { // Identifies the version of Elasticsearch in the captured trace data. "service_version", Build.current().version(), - // Configures a log file to write to. `_AGENT_HOME_` is a placeholder used - // by the agent. Don't disable writing to a log file, as the agent will then - // require extra Security Manager permissions when it tries to do something - // else, and it's just painful. - "log_file", "_AGENT_HOME_/../../logs/apm.log", - // ES does not use auto-instrumentation. "instrument", "false", "enable_experimental_instrumentations", "true" @@ -80,7 +74,8 @@ class APMJvmOptions { // Logging configuration. Unless you need detailed logs about what the APM // is doing, leave this value alone. 
- "log_level", "error", + "log_level", "warn", + "log_format_file", "JSON", "application_packages", "org.elasticsearch,org.apache.lucene", "metrics_interval", "120s", "breakdown_metrics", "false", @@ -134,9 +129,11 @@ class APMJvmOptions { * * @param settings the Elasticsearch settings to consider * @param secrets a wrapper to access the secrets, or null if there is no secrets + * @param logsDir the directory to write the apm log into * @param tmpdir Elasticsearch's temporary directory, where the config file will be written */ - static List apmJvmOptions(Settings settings, @Nullable SecureSettings secrets, Path tmpdir) throws UserException, IOException { + static List apmJvmOptions(Settings settings, @Nullable SecureSettings secrets, Path logsDir, Path tmpdir) throws UserException, + IOException { final Path agentJar = findAgentJar(); if (agentJar == null) { @@ -145,6 +142,11 @@ static List apmJvmOptions(Settings settings, @Nullable SecureSettings se final Map propertiesMap = extractApmSettings(settings); + // Configures a log file to write to. Don't disable writing to a log file, + // as the agent will then require extra Security Manager permissions when + // it tries to do something else, and it's just painful. + propertiesMap.put("log_file", logsDir.resolve("apm-agent.log").toString()); + // No point doing anything if we don't have a destination for the trace data, and it can't be configured dynamically if (propertiesMap.containsKey("server_url") == false && propertiesMap.containsKey("server_urls") == false) { return List.of(); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index 1b92300ac3dd2..5999f618bc0ab 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -139,7 +139,7 @@ private List jvmOptions( final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(); - final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), args.secrets(), tmpDir); + final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), args.secrets(), args.logsDir(), tmpDir); final List finalJvmOptions = new ArrayList<>( systemJvmOptions.size() + substitutedJvmOptions.size() + ergonomicJvmOptions.size() + apmOptions.size() diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index 7b3adadb29b4c..25c61c41638d1 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -219,7 +219,7 @@ private ServerArgs createArgs(OptionSet options, Environment env, SecureSettings } validatePidFile(pidFile); } - return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configFile()); + return new ServerArgs(daemonize, quiet, pidFile, secrets, env.settings(), env.configFile(), env.logsFile()); } @Override diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index 
c8dfb84363d62..57993d40391ac 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -195,7 +195,15 @@ public Process destroyForcibly() { ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePassword) throws Exception { var pinfo = new ProcessInfo(Map.copyOf(sysprops), Map.copyOf(envVars), esHomeDir); - var args = new ServerArgs(daemonize, quiet, null, secrets, nodeSettings.build(), esHomeDir.resolve("config")); + var args = new ServerArgs( + daemonize, + quiet, + null, + secrets, + nodeSettings.build(), + esHomeDir.resolve("config"), + esHomeDir.resolve("logs") + ); ServerProcess.ProcessStarter starter = pb -> { if (processValidator != null) { processValidator.validate(pb); diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 5d1e67bc92fb8..858787b361654 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -37,7 +37,7 @@ class WindowsServiceDaemon extends EnvironmentAwareCommand { public void execute(Terminal terminal, OptionSet options, Environment env, ProcessInfo processInfo) throws Exception { // the Windows service daemon doesn't support secure settings implementations other than the keystore try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))) { - var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile()); + var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile(), env.logsFile()); this.server = ServerProcess.start(terminal, processInfo, args); // start does not return until the server is ready, and we do not wait for the process } diff --git a/docs/changelog/100642.yaml b/docs/changelog/100642.yaml new file mode 100644 index 0000000000000..805a20174e11d --- /dev/null +++ b/docs/changelog/100642.yaml @@ -0,0 +1,6 @@ +pr: 100642 +summary: "ESQL: Alias duplicated aggregations in a stats" +area: ES|QL +type: enhancement +issues: + - 100544 diff --git a/docs/changelog/100776.yaml b/docs/changelog/100776.yaml new file mode 100644 index 0000000000000..a0bde13f47c92 --- /dev/null +++ b/docs/changelog/100776.yaml @@ -0,0 +1,6 @@ +pr: 100776 +summary: Health Report API should not return RED for unassigned cold/frozen shards + when data is available +area: ILM+SLM +type: enhancement +issues: [] diff --git a/docs/changelog/100782.yaml b/docs/changelog/100782.yaml new file mode 100644 index 0000000000000..c6007bfb4d9ba --- /dev/null +++ b/docs/changelog/100782.yaml @@ -0,0 +1,8 @@ +pr: 100782 +summary: "ESQL: `mv_expand` pushes down limit and project and keep the limit after\ + \ it untouched" +area: ES|QL +type: bug +issues: + - 99971 + - 100774 diff --git a/docs/changelog/100808.yaml b/docs/changelog/100808.yaml new file mode 100644 index 0000000000000..1abbfdcebf74e --- /dev/null +++ b/docs/changelog/100808.yaml @@ -0,0 +1,5 @@ +pr: 100808 +summary: Make tasks that calculate checkpoints cancellable +area: Transform +type: bug +issues: [] diff --git a/docs/changelog/100899.yaml b/docs/changelog/100899.yaml 
new file mode 100644 index 0000000000000..988546bb22cbe --- /dev/null +++ b/docs/changelog/100899.yaml @@ -0,0 +1,5 @@ +pr: 100899 +summary: Add methods for adding generation listeners with primary term +area: Store +type: enhancement +issues: [] diff --git a/docs/changelog/100974.yaml b/docs/changelog/100974.yaml new file mode 100644 index 0000000000000..e5d3a4ad3c9df --- /dev/null +++ b/docs/changelog/100974.yaml @@ -0,0 +1,5 @@ +pr: 100974 +summary: Create new cluster state API for querying features present on a cluster +area: "Infra/Core" +type: feature +issues: [] diff --git a/docs/changelog/101026.yaml b/docs/changelog/101026.yaml new file mode 100644 index 0000000000000..cee85a722d7fa --- /dev/null +++ b/docs/changelog/101026.yaml @@ -0,0 +1,5 @@ +pr: 101026 +summary: Remove `auto_configure` privilege for profiling +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/101050.yaml b/docs/changelog/101050.yaml new file mode 100644 index 0000000000000..1a68466e6e728 --- /dev/null +++ b/docs/changelog/101050.yaml @@ -0,0 +1,5 @@ +pr: 101050 +summary: Ensure the correct `threadContext` for `RemoteClusterNodesAction` +area: Network +type: bug +issues: [] diff --git a/docs/changelog/101051.yaml b/docs/changelog/101051.yaml new file mode 100644 index 0000000000000..05e7443dac8b3 --- /dev/null +++ b/docs/changelog/101051.yaml @@ -0,0 +1,6 @@ +pr: 101051 +summary: Percolator to support parsing script score query with params +area: Mapping +type: bug +issues: + - 97377 diff --git a/docs/changelog/101055.yaml b/docs/changelog/101055.yaml new file mode 100644 index 0000000000000..e4ca4548c2ef6 --- /dev/null +++ b/docs/changelog/101055.yaml @@ -0,0 +1,5 @@ +pr: 101055 +summary: Make tasks that calculate checkpoints time out +area: Transform +type: enhancement +issues: [] diff --git a/docs/changelog/101057.yaml b/docs/changelog/101057.yaml new file mode 100644 index 0000000000000..2024c714f58b0 --- /dev/null +++ b/docs/changelog/101057.yaml @@ -0,0 +1,5 @@ +pr: 101057 +summary: Add error logging for *QL +area: EQL +type: enhancement +issues: [] diff --git a/docs/changelog/101093.yaml b/docs/changelog/101093.yaml new file mode 100644 index 0000000000000..99765170dd257 --- /dev/null +++ b/docs/changelog/101093.yaml @@ -0,0 +1,6 @@ +pr: 101093 +summary: Make IPAddress writeable +area: Infra/Scripting +type: bug +issues: + - 101082 diff --git a/docs/changelog/101120.yaml b/docs/changelog/101120.yaml new file mode 100644 index 0000000000000..bf359eb21be9f --- /dev/null +++ b/docs/changelog/101120.yaml @@ -0,0 +1,6 @@ +pr: 101120 +summary: "ESQL: Fix escaping of backslash in LIKE operator" +area: ES|QL +type: bug +issues: + - 101106 diff --git a/docs/changelog/101126.yaml b/docs/changelog/101126.yaml new file mode 100644 index 0000000000000..7a0f45891b171 --- /dev/null +++ b/docs/changelog/101126.yaml @@ -0,0 +1,5 @@ +pr: 101126 +summary: Include totals in flamegraph response +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/101133.yaml b/docs/changelog/101133.yaml new file mode 100644 index 0000000000000..546a5392c309a --- /dev/null +++ b/docs/changelog/101133.yaml @@ -0,0 +1,5 @@ +pr: 101133 +summary: Update bundled JDK to 21.0.1 +area: Packaging +type: upgrade +issues: [] diff --git a/docs/changelog/101147.yaml b/docs/changelog/101147.yaml new file mode 100644 index 0000000000000..cb556af35eead --- /dev/null +++ b/docs/changelog/101147.yaml @@ -0,0 +1,5 @@ +pr: 101147 +summary: Persist data counts on job close before results index refresh +area: 
Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/101184.yaml b/docs/changelog/101184.yaml new file mode 100644 index 0000000000000..ac2f5f3ee8af1 --- /dev/null +++ b/docs/changelog/101184.yaml @@ -0,0 +1,6 @@ +pr: 101184 +summary: More robust timeout for repo analysis +area: Snapshot/Restore +type: bug +issues: + - 101182 diff --git a/docs/changelog/101185.yaml b/docs/changelog/101185.yaml new file mode 100644 index 0000000000000..63d3a4da328b1 --- /dev/null +++ b/docs/changelog/101185.yaml @@ -0,0 +1,5 @@ +pr: 101185 +summary: Repo analysis of uncontended register behaviour +area: Snapshot/Restore +type: enhancement +issues: [] diff --git a/docs/changelog/101202.yaml b/docs/changelog/101202.yaml new file mode 100644 index 0000000000000..565338a2dbb6e --- /dev/null +++ b/docs/changelog/101202.yaml @@ -0,0 +1,5 @@ +pr: 101202 +summary: Optimize `MurmurHash3` +area: "Ingest Node" +type: enhancement +issues: [] diff --git a/docs/changelog/101205.yaml b/docs/changelog/101205.yaml new file mode 100644 index 0000000000000..528f6fb35846e --- /dev/null +++ b/docs/changelog/101205.yaml @@ -0,0 +1,5 @@ +pr: 101205 +summary: Increase K/V look-back time interval +area: Application +type: bug +issues: [] diff --git a/docs/changelog/101212.yaml b/docs/changelog/101212.yaml new file mode 100644 index 0000000000000..ed2b433209e8d --- /dev/null +++ b/docs/changelog/101212.yaml @@ -0,0 +1,6 @@ +pr: 101212 +summary: Fix painless execute api and tsdb issue +area: TSDB +type: bug +issues: + - 101072 diff --git a/docs/changelog/101230.yaml b/docs/changelog/101230.yaml new file mode 100644 index 0000000000000..3ed7eacb3fce0 --- /dev/null +++ b/docs/changelog/101230.yaml @@ -0,0 +1,12 @@ +pr: 101230 +summary: Enable query phase parallelism within a single shard +area: Search +type: enhancement +issues: + - 80693 +highlight: + title: Enable query phase parallelism within a single shard + body: |- + Activate inter-segment search concurrency by default in the query phase, in order to + enable parallelizing search execution across segments that a single shard is made of. 
+ notable: true diff --git a/docs/changelog/101235.yaml b/docs/changelog/101235.yaml new file mode 100644 index 0000000000000..53adf9527c2c4 --- /dev/null +++ b/docs/changelog/101235.yaml @@ -0,0 +1,5 @@ +pr: 101235 +summary: Load different way +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/101245.yaml b/docs/changelog/101245.yaml new file mode 100644 index 0000000000000..2f9fef318f31a --- /dev/null +++ b/docs/changelog/101245.yaml @@ -0,0 +1,5 @@ +pr: 101245 +summary: Make S3 anti-contention delay configurable +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/changelog/101255.yaml b/docs/changelog/101255.yaml new file mode 100644 index 0000000000000..37d8f7e3c14fe --- /dev/null +++ b/docs/changelog/101255.yaml @@ -0,0 +1,5 @@ +pr: 101255 +summary: Provide stable resampling +area: Application +type: bug +issues: [] diff --git a/docs/changelog/101264.yaml b/docs/changelog/101264.yaml new file mode 100644 index 0000000000000..7160240b2f3a0 --- /dev/null +++ b/docs/changelog/101264.yaml @@ -0,0 +1,5 @@ +pr: 101264 +summary: Align look-back with client-side cache +area: Application +type: bug +issues: [] diff --git a/docs/changelog/101265.yaml b/docs/changelog/101265.yaml new file mode 100644 index 0000000000000..f39b57fa9a75e --- /dev/null +++ b/docs/changelog/101265.yaml @@ -0,0 +1,13 @@ +pr: 101265 +summary: Rollup functionality is now deprecated +area: Rollup +type: deprecation +issues: [] +deprecation: + title: >- + Rollup functionality is now deprecated + area: Rollup + details: |- + {ref}/xpack-rollup[Rollup functionality] has been deprecated and will be removed in a future release. Previously, rollups were available in technical preview. + impact: |- + Use {ref}/downsampling.html[downsampling] to reduce storage costs for time series data by storing it at reduced granularity. 
diff --git a/docs/changelog/101344.yaml b/docs/changelog/101344.yaml new file mode 100644 index 0000000000000..b546e743301f6 --- /dev/null +++ b/docs/changelog/101344.yaml @@ -0,0 +1,5 @@ +pr: 101344 +summary: Register `repository_s3` settings +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/changelog/101346.yaml b/docs/changelog/101346.yaml new file mode 100644 index 0000000000000..b32b123c506d1 --- /dev/null +++ b/docs/changelog/101346.yaml @@ -0,0 +1,5 @@ +pr: 101346 +summary: Report full stack trace for non-state file settings transforms +area: Infra/Settings +type: bug +issues: [] diff --git a/docs/changelog/101358.yaml b/docs/changelog/101358.yaml new file mode 100644 index 0000000000000..3ae2a44e15e5e --- /dev/null +++ b/docs/changelog/101358.yaml @@ -0,0 +1,6 @@ +pr: 101358 +summary: Make DISSECT parameter `append_separator` case insensitive +area: ES|QL +type: bug +issues: + - 101138 diff --git a/docs/changelog/101362.yaml b/docs/changelog/101362.yaml new file mode 100644 index 0000000000000..e1d763cd416fa --- /dev/null +++ b/docs/changelog/101362.yaml @@ -0,0 +1,6 @@ +pr: 101362 +summary: "ESQL: Remove the swapped-args check for date_xxx()" +area: ES|QL +type: enhancement +issues: + - 99562 diff --git a/docs/changelog/101383.yaml b/docs/changelog/101383.yaml new file mode 100644 index 0000000000000..4875403acfaeb --- /dev/null +++ b/docs/changelog/101383.yaml @@ -0,0 +1,5 @@ +pr: 101383 +summary: "ESQL: Track memory from values loaded from lucene" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/101385.yaml b/docs/changelog/101385.yaml new file mode 100644 index 0000000000000..406ed804cbbcc --- /dev/null +++ b/docs/changelog/101385.yaml @@ -0,0 +1,6 @@ +pr: 101385 +summary: "ESQL: Fix planning of MV_EXPAND with foldable expressions" +area: ES|QL +type: bug +issues: + - 101118 diff --git a/docs/changelog/101392.yaml b/docs/changelog/101392.yaml new file mode 100644 index 0000000000000..af79917245726 --- /dev/null +++ b/docs/changelog/101392.yaml @@ -0,0 +1,5 @@ +pr: 101392 +summary: Include ML processor limits in `_ml/info` response +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/101396.yaml b/docs/changelog/101396.yaml new file mode 100644 index 0000000000000..a486b2bed9237 --- /dev/null +++ b/docs/changelog/101396.yaml @@ -0,0 +1,5 @@ +pr: 101396 +summary: "ESQL: Track blocks emitted from lucene" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/101438.yaml b/docs/changelog/101438.yaml new file mode 100644 index 0000000000000..8189ee96b6576 --- /dev/null +++ b/docs/changelog/101438.yaml @@ -0,0 +1,6 @@ +pr: 101438 +summary: "ESQL: Fix eval of functions on foldable literals" +area: ES|QL +type: bug +issues: + - 101425 diff --git a/docs/changelog/101456.yaml b/docs/changelog/101456.yaml new file mode 100644 index 0000000000000..db55dfbde1c64 --- /dev/null +++ b/docs/changelog/101456.yaml @@ -0,0 +1,6 @@ +pr: 101456 +summary: "ESQL: adds Enrich implicit `match_fields` to `field_caps` call" +area: ES|QL +type: bug +issues: + - 101328 diff --git a/docs/changelog/101457.yaml b/docs/changelog/101457.yaml new file mode 100644 index 0000000000000..03bdbe39b5b8e --- /dev/null +++ b/docs/changelog/101457.yaml @@ -0,0 +1,14 @@ +pr: 101457 +summary: "Remove Plugin.createComponents method in favour of overload with a PluginServices object" +area: Infra/Plugins +type: breaking-java +breaking: + area: "Java API" + title: "Plugin.createComponents method has been refactored to take a single 
PluginServices object" + details: > + Plugin.createComponents currently takes several different service arguments. The signature of this method changes + every time a new service is added. The method has now been modified to take a single interface object + that new services are added to. This will reduce API incompatibility issues when a new service + is introduced in the future. + impact: "Plugins that override createComponents will need to be refactored to override the new method on ES 8.12+" + notable: false diff --git a/docs/changelog/101474.yaml b/docs/changelog/101474.yaml new file mode 100644 index 0000000000000..2c013fe5d2537 --- /dev/null +++ b/docs/changelog/101474.yaml @@ -0,0 +1,5 @@ +pr: 101474 +summary: "[Search Applications] Return 400 response when template rendering produces invalid JSON" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/101486.yaml b/docs/changelog/101486.yaml new file mode 100644 index 0000000000000..99795feda328f --- /dev/null +++ b/docs/changelog/101486.yaml @@ -0,0 +1,5 @@ +pr: 101486 +summary: Improving tika handling +area: Ingest Node +type: bug +issues: [] diff --git a/docs/changelog/101488.yaml b/docs/changelog/101488.yaml new file mode 100644 index 0000000000000..1db48a63f8542 --- /dev/null +++ b/docs/changelog/101488.yaml @@ -0,0 +1,5 @@ +pr: 101488 +summary: "ESQL: More tracking in `BlockHash` impls" +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/101495.yaml b/docs/changelog/101495.yaml new file mode 100644 index 0000000000000..f61c9b824b77c --- /dev/null +++ b/docs/changelog/101495.yaml @@ -0,0 +1,5 @@ +pr: 101495 +summary: "[DSL] skip deleting indices that have in-progress downsampling operations" +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/101497.yaml b/docs/changelog/101497.yaml new file mode 100644 index 0000000000000..7909cb1ecdc0d --- /dev/null +++ b/docs/changelog/101497.yaml @@ -0,0 +1,5 @@ +pr: 101497 +summary: Fix snapshot double finalization +area: Snapshot/Restore +type: bug +issues: [] diff --git a/docs/changelog/101518.yaml b/docs/changelog/101518.yaml new file mode 100644 index 0000000000000..53db542640348 --- /dev/null +++ b/docs/changelog/101518.yaml @@ -0,0 +1,6 @@ +pr: 101518 +summary: Check that scripts produce correct json in render template action +area: Search +type: bug +issues: + - 101477 diff --git a/docs/changelog/98244.yaml b/docs/changelog/98244.yaml deleted file mode 100644 index e1dde59a83e47..0000000000000 --- a/docs/changelog/98244.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 98244 -summary: Optimize ContentPath#pathAsText -area: Search -type: enhancement -issues: - - 94544 diff --git a/docs/changelog/99752.yaml b/docs/changelog/99752.yaml new file mode 100644 index 0000000000000..c137a563bea39 --- /dev/null +++ b/docs/changelog/99752.yaml @@ -0,0 +1,5 @@ +pr: 99752 +summary: Pass shard's primary term to Engine#addSegmentGenerationListener +area: Store +type: enhancement +issues: [] diff --git a/docs/changelog/99975.yaml b/docs/changelog/99975.yaml new file mode 100644 index 0000000000000..a34746c27ec99 --- /dev/null +++ b/docs/changelog/99975.yaml @@ -0,0 +1,5 @@ +pr: 99975 +summary: Rename component templates and pipelines according to the new naming conventions +area: Indices APIs +type: enhancement +issues: [] diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index 30be28614d122..ce3d0a367dc4e 100644 --- 
a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -38,7 +38,7 @@ Use `synonyms_set` configuration option to provide a synonym set created via Syn ---- "filter": { "synonyms_filter": { - "type": "synonym", + "type": "synonym_graph", "synonyms_set": "my-synonym-set", "updateable": true } @@ -51,7 +51,7 @@ Use `synonyms_path` to provide a synonym file : ---- "filter": { "synonyms_filter": { - "type": "synonym", + "type": "synonym_graph", "synonyms_path": "analysis/synonym-set.txt" } } @@ -66,7 +66,7 @@ Use `synonyms` to define inline synonyms: ---- "filter": { "synonyms_filter": { - "type": "synonym", + "type": "synonym_graph", "synonyms": ["pc => personal computer", "computer, pc, laptop"] } } diff --git a/docs/reference/ccr/bi-directional-disaster-recovery.asciidoc b/docs/reference/ccr/bi-directional-disaster-recovery.asciidoc index 614af8846230e..b491e90053031 100644 --- a/docs/reference/ccr/bi-directional-disaster-recovery.asciidoc +++ b/docs/reference/ccr/bi-directional-disaster-recovery.asciidoc @@ -10,7 +10,7 @@ ---- PUT _data_stream/logs-generic-default ---- -// TESTSETUP +// TESTSETUP [source,console] ---- @@ -20,12 +20,12 @@ DELETE /_data_stream/* //// Learn how to set up disaster recovery between two clusters based on -bi-directional {ccr}. The following tutorial is designed for data streams which support -<> and <>. You can only perform these actions on the leader index. +bi-directional {ccr}. The following tutorial is designed for data streams which support +<> and <>. You can only perform these actions on the leader index. -This tutorial works with {ls} as the source of ingestion. It takes advantage of a {ls} feature where {logstash-ref}/plugins-outputs-elasticsearch.html[the {ls} output to {es}] can be load balanced across an array of hosts specified. {beats} and {agents} currently do not -support multiple outputs. It should also be possible to set up a proxy -(load balancer) to redirect traffic without {ls} in this tutorial. +This tutorial works with {ls} as the source of ingestion. It takes advantage of a {ls} feature where {logstash-ref}/plugins-outputs-elasticsearch.html[the {ls} output to {es}] can be load balanced across an array of hosts specified. {beats} and {agents} currently do not +support multiple outputs. It should also be possible to set up a proxy +(load balancer) to redirect traffic without {ls} in this tutorial. * Setting up a remote cluster on `clusterA` and `clusterB`. * Setting up bi-directional cross-cluster replication with exclusion patterns. @@ -92,7 +92,7 @@ PUT /_ccr/auto_follow/logs-generic-default "leader_index_patterns": [ ".ds-logs-generic-default-20*" ], - "leader_index_exclusion_patterns":"{{leader_index}}-replicated_from_clustera", + "leader_index_exclusion_patterns":"*-replicated_from_clustera", "follow_index_pattern": "{{leader_index}}-replicated_from_clusterb" } @@ -103,7 +103,7 @@ PUT /_ccr/auto_follow/logs-generic-default "leader_index_patterns": [ ".ds-logs-generic-default-20*" ], - "leader_index_exclusion_patterns":"{{leader_index}}-replicated_from_clusterb", + "leader_index_exclusion_patterns":"*-replicated_from_clusterb", "follow_index_pattern": "{{leader_index}}-replicated_from_clustera" } ---- @@ -126,7 +126,7 @@ pattern in the UI. Use the API in this step. + This example uses the input generator to demonstrate the document count in the clusters. Reconfigure this section -to suit your own use case. +to suit your own use case. 
+ [source,logstash] ---- @@ -171,7 +171,7 @@ Bi-directional {ccr} will create one more data stream on each of the clusters with the `-replication_from_cluster{a|b}` suffix. At the end of this step: + * data streams on cluster A contain: -** 50 documents in `logs-generic-default-replicated_from_clusterb` +** 50 documents in `logs-generic-default-replicated_from_clusterb` ** 50 documents in `logs-generic-default` * data streams on cluster B contain: ** 50 documents in `logs-generic-default-replicated_from_clustera` @@ -179,7 +179,7 @@ with the `-replication_from_cluster{a|b}` suffix. At the end of this step: . Queries should be set up to search across both data streams. A query on `logs*`, on either of the clusters, returns 100 -hits in total. +hits in total. + [source,console] ---- @@ -199,27 +199,27 @@ use cases where {ls} ingests continuously.) bin/logstash -f multiple_hosts.conf ---- -. Observe all {ls} traffic will be redirected to `cluster B` automatically. +. Observe all {ls} traffic will be redirected to `cluster B` automatically. + -TIP: You should also redirect all search traffic to the `clusterB` cluster during this time. +TIP: You should also redirect all search traffic to the `clusterB` cluster during this time. -. The two data streams on `cluster B` now contain a different number of documents. +. The two data streams on `cluster B` now contain a different number of documents. + -* data streams on cluster A (down) -** 50 documents in `logs-generic-default-replicated_from_clusterb` +* data streams on cluster A (down) +** 50 documents in `logs-generic-default-replicated_from_clusterb` ** 50 documents in `logs-generic-default` -* data streams On cluster B (up) +* data streams on cluster B (up) ** 50 documents in `logs-generic-default-replicated_from_clustera` ** 150 documents in `logs-generic-default` ==== Failback when `clusterA` comes back -. You can simulate this by turning `cluster A` back on. +. You can simulate this by turning `cluster A` back on. . Data ingested to `cluster B` during `cluster A` 's downtime will be -automatically replicated. +automatically replicated. + * data streams on cluster A -** 150 documents in `logs-generic-default-replicated_from_clusterb` +** 150 documents in `logs-generic-default-replicated_from_clusterb` ** 50 documents in `logs-generic-default` * data streams on cluster B ** 50 documents in `logs-generic-default-replicated_from_clustera` @@ -271,5 +271,5 @@ POST logs-generic-default/_update_by_query } } ---- -+ ++ TIP: If a soft delete is merged away before it can be replicated to a follower, the following process will fail due to incomplete history on the leader, see <> for more details. diff --git a/docs/reference/data-rollup-transform.asciidoc b/docs/reference/data-rollup-transform.asciidoc index 3a7b3bedc7765..3116d4117f70e 100644 --- a/docs/reference/data-rollup-transform.asciidoc +++ b/docs/reference/data-rollup-transform.asciidoc @@ -8,6 +8,8 @@ * <> + +deprecated::[8.11.0,"Rollups will be removed in a future version. 
Use <> instead."] ++ include::rollup/index.asciidoc[tag=rollup-intro] * <> diff --git a/docs/reference/data-streams/data-stream-apis.asciidoc b/docs/reference/data-streams/data-stream-apis.asciidoc index a25c5728be597..d3580ca4448a7 100644 --- a/docs/reference/data-streams/data-stream-apis.asciidoc +++ b/docs/reference/data-streams/data-stream-apis.asciidoc @@ -15,6 +15,8 @@ The following APIs are available for managing <>: [[data-stream-lifecycle-api]] The following APIs are available for managing the built-in lifecycle of data streams: +preview::[] + * <> preview:[] * <> diff --git a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc index a3bdf20b85715..fd481d7ca4815 100644 --- a/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/delete-lifecycle.asciidoc @@ -4,7 +4,7 @@ Delete Data Stream Lifecycle ++++ -preview:[] +preview::[] Deletes the lifecycle from a set of data streams. diff --git a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc index 1ccd36f5468e2..a2609dcb78ecf 100644 --- a/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/explain-lifecycle.asciidoc @@ -4,7 +4,7 @@ Explain Data Stream Lifecycle ++++ -preview:[] +preview::[] Retrieves the current data stream lifecycle status for one or more data stream backing indices. diff --git a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc index 66b30b7975c11..f20a3393c191c 100644 --- a/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/get-lifecycle.asciidoc @@ -4,7 +4,7 @@ Get Data Stream Lifecycle ++++ -preview:[] +preview::[] Gets the lifecycle of a set of data streams. diff --git a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc index 81055379fe6da..89b8bbeb880c3 100644 --- a/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc +++ b/docs/reference/data-streams/lifecycle/apis/put-lifecycle.asciidoc @@ -4,7 +4,7 @@ Put Data Stream Lifecycle ++++ -preview:[] +preview::[] Configures the data stream lifecycle for the targeted data streams. @@ -53,6 +53,12 @@ Defaults to `open`. (Optional, string) If defined, every document added to this data stream will be stored at least for this time frame. Any time after this duration the document could be deleted. When empty, every document in this data stream will be stored indefinitely. + +`enabled`:: +(Optional, boolean) +If defined, it turns the data stream lifecycle on/off (`true`/`false`) for this data stream. +A data stream lifecycle that's disabled (`enabled: false`) will have no effect on the +data stream. Defaults to `true`. ==== [[data-streams-put-lifecycle-example]] diff --git a/docs/reference/data-streams/lifecycle/index.asciidoc b/docs/reference/data-streams/lifecycle/index.asciidoc index da837293ab68e..6c0220ef0a80f 100644 --- a/docs/reference/data-streams/lifecycle/index.asciidoc +++ b/docs/reference/data-streams/lifecycle/index.asciidoc @@ -2,7 +2,7 @@ [[data-stream-lifecycle]] == Data stream lifecycle -preview:[] +preview::[] A data stream lifecycle is the built-in mechanism data streams use to manage their lifecycle. 
It enables you to easily automate the management of your data streams according to your retention requirements. For example, you could configure @@ -28,7 +28,14 @@ each data stream and performs the following steps: 1. Checks if the data stream has a data stream lifecycle configured, skipping any indices not part of a managed data stream. 2. Rolls over the write index of the data stream, if it fulfills the conditions defined by <>. -3. Applies retention to the remaining backing indices. This means deleting the backing indices whose +3. After an index is not the write index anymore (i.e. the data stream has been rolled over), +automatically tail merges the index. Data stream lifecycle executes a merge operation that only targets +the long tail of small segments instead of the whole shard. As the segments are organised +into tiers of exponential sizes, merging the long tail of small segments is only a +fraction of the cost of force merging to a single segment. The small segments would usually +hold the most recent data, so tail merging will focus the merging resources on the higher-value +data that is most likely to keep being queried. +4. Applies retention to the remaining backing indices. This means deleting the backing indices whose `generation_time` is longer than the configured retention period. The `generation_time` is only applicable to rolled over backing indices and it is either the time since the backing index got rolled over, or the time optionally configured in the <> setting. @@ -37,7 +44,7 @@ IMPORTANT: We use the `generation_time` instead of the creation time because this index have passed the retention period. As a result, the retention period is not the exact time data gets deleted, but the minimum time data will be stored. -NOTE: The steps `2` and `3` apply only to backing indices that are not already managed by {ilm-init}, meaning that these indices either do +NOTE: Steps `2-4` apply only to backing indices that are not already managed by {ilm-init}, meaning that these indices either do not have an {ilm-init} policy defined, or if they do, they have <> set to `false`. @@ -55,6 +62,7 @@ that matches the name of your data stream (see <> to edit the lifecycle on the data stream itself (see <>). +* Migrate an existing {ilm-init} managed data stream to Data stream lifecycle using <>. NOTE: Updating the data stream lifecycle of an existing data stream is different from updating the settings or the mapping, because it is applied on the data stream level and not on the individual backing indices. 
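To make the note above concrete, here is a minimal sketch of updating the lifecycle directly on an existing data stream (an editorial illustration, not part of this change; `my-data-stream` is a placeholder name):

[source,console]
----
PUT _data_stream/my-data-stream/_lifecycle <1>
{
  "data_retention": "30d"
}
----

<1> `my-data-stream` is a placeholder; because the update applies at the data stream level, the backing indices are not modified individually.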
@@ -62,3 +70,5 @@ because it is applied on the data stream level and not on the individual backing include::tutorial-manage-new-data-stream.asciidoc[] include::tutorial-manage-existing-data-stream.asciidoc[] + +include::tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc[] diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc index e7b4d99d25d80..5670faaade3ce 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-existing-data-stream.asciidoc @@ -2,7 +2,7 @@ [[tutorial-manage-existing-data-stream]] === Tutorial: Update existing data stream -preview:[] +preview::[] To update the lifecycle of an existing data stream you do the following actions: diff --git a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc index f22a0ae736d9e..6f1d81ab6ead2 100644 --- a/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc +++ b/docs/reference/data-streams/lifecycle/tutorial-manage-new-data-stream.asciidoc @@ -2,7 +2,7 @@ [[tutorial-manage-new-data-stream]] === Tutorial: Create a data stream with a lifecycle -preview:[] +preview::[] To create a data stream with a built-in lifecycle, follow these steps: diff --git a/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc new file mode 100644 index 0000000000000..de11bbcfc2d4e --- /dev/null +++ b/docs/reference/data-streams/lifecycle/tutorial-migrate-data-stream-from-ilm-to-dsl.asciidoc @@ -0,0 +1,460 @@ +[role="xpack"] +[[tutorial-migrate-data-stream-from-ilm-to-dsl]] +=== Tutorial: Migrate ILM managed data stream to Data stream lifecycle + +preview::[] + +In this tutorial we'll look at migrating an existing data stream from {ilm-init} to +Data stream lifecycle. The existing {ilm-init} managed backing indices will continue +to be managed by {ilm-init} until they age out and get deleted by {ilm-init}; however, +the new backing indices will be managed by Data stream lifecycle. +This way, a data stream is gradually migrated away from being managed by {ilm-cap} to +being managed by Data stream lifecycle. As we'll see, {ilm-cap} and Data stream lifecycle +can co-manage a data stream; however, an index can only be managed by one system at +a time. + +Let's first create a data stream with two backing indices managed by {ilm-cap}. 
+We first create an {ilm-cap} policy: + +[source,console] +---- +PUT _ilm/policy/pre-dsl-ilm-policy +{ + "policy": { + "phases": { + "hot": { + "actions": { + "rollover": { + "max_primary_shard_size": "50gb" + } + } + }, + "delete": { + "min_age": "7d", + "actions": { + "delete": {} + } + } + } + } +} +---- + +And let's create an index template that'll back the data stream and configure {ilm-cap}: + +[source,console] +---- +PUT _index_template/dsl-data-stream-template +{ + "index_patterns": ["dsl-data-stream*"], + "data_stream": { }, + "priority": 500, + "template": { + "settings": { + "index.lifecycle.name": "pre-dsl-ilm-policy" + } + } +} +---- +// TEST[continued] + +We'll now index a document targeting `dsl-data-stream` to create the data stream +and we'll also manually roll over the data stream to have another generation index created: + +[source,console] +---- +POST dsl-data-stream/_doc +{ + "@timestamp": "2023-10-18T16:21:15.000Z", + "message": "192.0.2.42 - - [06/May/2099:16:21:15 +0000] \"GET /images/bg.jpg HTTP/1.0\" 200 24736" +} +---- +// TEST[continued] + +[source,console] +---- +POST dsl-data-stream/_rollover +---- +// TEST[continued] + +We'll use the <> API to inspect the state of +the data stream: + +[source,console] +-------------------------------------------------- +GET _data_stream/dsl-data-stream +-------------------------------------------------- +// TEST[continued] + +Inspecting the response, we'll see that both backing indices are managed by {ilm-init} +and that the next generation index will also be managed by {ilm-init}: + +[source,console-result] +---- +{ + "data_streams": [ + { + "name": "dsl-data-stream", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000001", <1> + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, <2> + "ilm_policy": "pre-dsl-ilm-policy", <3> + "managed_by": "Index Lifecycle Management" <4> + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000002", + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" + } + ], + "generation": 2, + "status": "GREEN", + "template": "dsl-data-stream-template", + "next_generation_managed_by": "Index Lifecycle Management", <5> + "prefer_ilm": true, <6> + "ilm_policy": "pre-dsl-ilm-policy", <7> + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false + } + ] +} +---- +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] + +<1> The name of the backing index. +<2> For each backing index we display the value of the <> +configuration which will indicate if {ilm-init} takes precedence over data stream lifecycle in case +both systems are configured for an index. +<3> The {ilm-init} policy configured for this index. 
+<4> The system that manages this index (possible values are "Index Lifecycle Management", +"Data stream lifecycle", or "Unmanaged") +<5> The system that will manage the next generation index (the new write index of this +data stream, once the data stream is rolled over). The possible values are +"Index Lifecycle Management", "Data stream lifecycle", or "Unmanaged". +<6> The <> value configured in the index template +that's backing the data stream. This value will be configured for all the new backing indices. +If it's not configured in the index template, the backing indices will receive the `true` +default value ({ilm-init} takes precedence over data stream lifecycle by default as it's +currently richer in features). +<7> The {ilm-init} policy configured in the index template that's backing this data +stream (which will be configured on all the new backing indices, as long as it exists +in the index template). + +To migrate the `dsl-data-stream` to data stream lifecycle, we'll have to execute +two steps: + +1. Update the index template that's backing the data stream to configure <> +to `false`, and to configure data stream lifecycle. +2. Configure the data stream lifecycle for the _existing_ `dsl-data-stream` using +the <>. + +IMPORTANT: The data stream lifecycle configuration that's added to the index template, +being a data stream configuration, will only apply to **new** data streams. +Our data stream exists already, so even though we added a data stream lifecycle +configuration in the index template, it will not be applied to `dsl-data-stream`. + + +[[update-index-template-for-dsl]] +Let's update the index template: + +[source,console] +---- +PUT _index_template/dsl-data-stream-template +{ + "index_patterns": ["dsl-data-stream*"], + "data_stream": { }, + "priority": 500, + "template": { + "settings": { + "index.lifecycle.name": "pre-dsl-ilm-policy", + "index.lifecycle.prefer_ilm": false <1> + }, + "lifecycle": { + "data_retention": "7d" <2> + } + } +} +---- +// TEST[continued] + +<1> The `prefer_ilm` setting will now be configured on the **new** backing indices +(created by rolling over the data stream) such that {ilm-init} does _not_ take +precedence over Data stream lifecycle. +<2> We're configuring the data stream lifecycle so _new_ data streams will be +managed by Data stream lifecycle. + +We've now made sure that new data streams will be managed by Data stream lifecycle. 
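As an optional verification step (an editorial addition, not part of the original tutorial), you can fetch the template back and confirm both changes took effect:

[source,console]
----
GET _index_template/dsl-data-stream-template <1>
----

<1> The response should include the `index.lifecycle.prefer_ilm` setting set to `false` and a `lifecycle` section with the `7d` retention.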
+ +Let's update our existing `dsl-data-stream` and configure Data stream lifecycle: + +[source,console] +---- +PUT _data_stream/dsl-data-stream/_lifecycle +{ + "data_retention": "7d" +} +---- +// TEST[continued] + +We can inspect the data stream to check that the next generation will indeed be +managed by Data stream lifecycle: + +[source,console] +-------------------------------------------------- +GET _data_stream/dsl-data-stream +-------------------------------------------------- +// TEST[continued] + +[source,console-result] +---- +{ + "data_streams": [ + { + "name": "dsl-data-stream", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000001", + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" <1> + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000002", + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" <2> + } + ], + "generation": 2, + "status": "GREEN", + "template": "dsl-data-stream-template", + "lifecycle": { + "enabled": true, + "data_retention": "7d" + }, + "ilm_policy": "pre-dsl-ilm-policy", + "next_generation_managed_by": "Data stream lifecycle", <3> + "prefer_ilm": false, <4> + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false + } + ] +} +---- +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] + +<1> The existing backing index will continue to be managed by {ilm-init} +<2> The existing backing index will continue to be managed by {ilm-init} +<3> The next generation index will be managed by Data stream lifecycle +<4> The `prefer_ilm` setting value we configured in the index template is reflected +and will be configured accordingly for new backing indices. 
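As another editorial aside (not a step of the original tutorial), the explain data stream lifecycle API can report, per backing index, how each index is being managed; wildcards are accepted for the target:

[source,console]
----
GET .ds-dsl-data-stream-*/_lifecycle/explain <1>
----

<1> The wildcard targets all backing indices of `dsl-data-stream`; the response indicates, for each index, whether Data stream lifecycle manages it.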
+ +We'll now roll over the data stream to see the new generation index being managed by +Data stream lifecycle: + +[source,console] +---- +POST dsl-data-stream/_rollover +---- +// TEST[continued] + +[source,console] +---- +GET _data_stream/dsl-data-stream +---- +// TEST[continued] + +[source,console-result] +---- +{ + "data_streams": [ + { + "name": "dsl-data-stream", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000001", + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" <1> + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000002", + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" <2> + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000003", + "index_uuid": "PA_JquKGSiKcAKBA8abcd1", + "prefer_ilm": false, <3> + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Data stream lifecycle" <4> + } + ], + "generation": 3, + "status": "GREEN", + "template": "dsl-data-stream-template", + "lifecycle": { + "enabled": true, + "data_retention": "7d" + }, + "ilm_policy": "pre-dsl-ilm-policy", + "next_generation_managed_by": "Data stream lifecycle", + "prefer_ilm": false, + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false + } + ] +} +---- +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] + +<1> The backing indices that existed before rollover will continue to be managed by {ilm-init} +<2> The backing indices that existed before rollover will continue to be managed by {ilm-init} +<3> The new write index received the `false` value for the `prefer_ilm` setting, as we configured +in the index template +<4> The new write index is managed by `Data stream lifecycle` + +We can easily change this data stream to be managed by {ilm-cap} because we didn't remove +the {ilm-cap} policy when we <>. + +We can achieve this in two ways: + +1. <> from the data streams +2. Disable Data stream lifecycle by configuring the `enabled` flag to `false` (option 1 is sketched right after this list). 
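For completeness, option 1 would be a single call to the delete data stream lifecycle API. This is an editorial sketch and is not executed in this tutorial:

[source,console]
----
DELETE _data_stream/dsl-data-stream/_lifecycle <1>
----

<1> Removes the lifecycle from the data stream; we keep it in place here and implement option 2 instead.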
+ +Let's implement option 2 and disable the data stream lifecycle: + +[source,console] +---- +PUT _data_stream/dsl-data-stream/_lifecycle +{ + "data_retention": "7d", + "enabled": false <1> +} +---- +// TEST[continued] +<1> The `enabled` flag can be omitted and defaults to `true`; however, here we +explicitly configure it to `false`. +Let's check the state of the data stream: + +[source,console] +---- +GET _data_stream/dsl-data-stream +---- +// TEST[continued] + +[source,console-result] +---- +{ + "data_streams": [ + { + "name": "dsl-data-stream", + "timestamp_field": { + "name": "@timestamp" + }, + "indices": [ + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000001", + "index_uuid": "xCEhwsp8Tey0-FLNFYVwSg", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000002", + "index_uuid": "PA_JquKGSiKcAKBA8DJ5gw", + "prefer_ilm": true, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" + }, + { + "index_name": ".ds-dsl-data-stream-2023.10.19-000003", + "index_uuid": "PA_JquKGSiKcAKBA8abcd1", + "prefer_ilm": false, + "ilm_policy": "pre-dsl-ilm-policy", + "managed_by": "Index Lifecycle Management" <1> + } + ], + "generation": 3, + "status": "GREEN", + "template": "dsl-data-stream-template", + "lifecycle": { + "enabled": false, <2> + "data_retention": "7d" + }, + "ilm_policy": "pre-dsl-ilm-policy", + "next_generation_managed_by": "Index Lifecycle Management", <3> + "prefer_ilm": false, + "hidden": false, + "system": false, + "allow_custom_routing": false, + "replicated": false + } + ] +} +---- +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000001"/"index_name": $body.data_streams.0.indices.0.index_name/] +// TESTRESPONSE[s/"index_uuid": "xCEhwsp8Tey0-FLNFYVwSg"/"index_uuid": $body.data_streams.0.indices.0.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000002"/"index_name": $body.data_streams.0.indices.1.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8DJ5gw"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] +// TESTRESPONSE[s/"index_name": ".ds-dsl-data-stream-2023.10.19-000003"/"index_name": $body.data_streams.0.indices.2.index_name/] +// TESTRESPONSE[s/"index_uuid": "PA_JquKGSiKcAKBA8abcd1"/"index_uuid": $body.data_streams.0.indices.2.index_uuid/] +// TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] +<1> The write index is now managed by {ilm-cap} +<2> The `lifecycle` configured on the data stream is now disabled. +<3> The next write index will be managed by {ilm-cap} + +Had we removed the {ilm-cap} policy from the index template when we <> +it, the write index of the data stream would now be `Unmanaged` because the index +wouldn't have the {ilm-cap} policy configured to fall back on. 
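To make that last point concrete, removing the {ilm-cap} policy from the template would have looked like the sketch below (an editorial illustration, not a tutorial step); the `index.lifecycle.name` setting is simply omitted:

[source,console]
----
PUT _index_template/dsl-data-stream-template <1>
{
  "index_patterns": ["dsl-data-stream*"],
  "data_stream": { },
  "priority": 500,
  "template": {
    "settings": {
      "index.lifecycle.prefer_ilm": false
    },
    "lifecycle": {
      "data_retention": "7d"
    }
  }
}
----

<1> Without `index.lifecycle.name`, rolled-over write indices would have no {ilm-init} policy to fall back on.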
+ +////////////////////////// +[source,console] +-------------------------------------------------- +DELETE _data_stream/dsl-data-stream +DELETE _index_template/dsl-data-stream-template +DELETE _ilm/policy/pre-dsl-ilm-policy +-------------------------------------------------- +// TEST[continued] + +////////////////////////// + diff --git a/docs/reference/data-streams/set-up-tsds.asciidoc b/docs/reference/data-streams/set-up-tsds.asciidoc index a98e3c7302424..c175da2e991e9 100644 --- a/docs/reference/data-streams/set-up-tsds.asciidoc +++ b/docs/reference/data-streams/set-up-tsds.asciidoc @@ -192,8 +192,7 @@ PUT _component_template/my-weather-sensor-settings "template": { "settings": { "index.lifecycle.name": "my-lifecycle-policy", - "index.look_ahead_time": "3h", - "index.codec": "best_compression" + "index.look_ahead_time": "3h" } }, "_meta": { diff --git a/docs/reference/esql/esql-commands.asciidoc b/docs/reference/esql/esql-commands.asciidoc index 93508331d6be1..8b0e99344add1 100644 --- a/docs/reference/esql/esql-commands.asciidoc +++ b/docs/reference/esql/esql-commands.asciidoc @@ -1,17 +1,63 @@ [[esql-commands]] -== {esql} commands +=== {esql} commands ++++ Commands ++++ -{esql} provides a comprehensive set of source and processing commands: +// tag::source_commands[] +==== Source commands -<>:: -include::source-commands/esql-source-commands.asciidoc[tag=list] +An {esql} source command produces a table, typically with data from {es}. An {esql} query must start with a source command. -<>:: -include::processing-commands/esql-processing-commands.asciidoc[tag=list] +image::images/esql/source-command.svg[A source command producing a table from {es},align="center"] -include::source-commands/esql-source-commands.asciidoc[] -include::processing-commands/esql-processing-commands.asciidoc[] +{esql} supports these source commands: + +* <> +* <> +* <> + +// end::source_commands[] + +// tag::proc_commands[] +==== Processing commands + +{esql} processing commands change an input table by adding, removing, or changing +rows and columns. + +image::images/esql/processing-command.svg[A processing command changing an input table,align="center"] + +{esql} supports these processing commands: + +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> +* <> + +// end::proc_commands[] + +include::source-commands/from.asciidoc[] +include::source-commands/row.asciidoc[] +include::source-commands/show.asciidoc[] + +include::processing-commands/dissect.asciidoc[] +include::processing-commands/drop.asciidoc[] +include::processing-commands/enrich.asciidoc[] +include::processing-commands/eval.asciidoc[] +include::processing-commands/grok.asciidoc[] +include::processing-commands/keep.asciidoc[] +include::processing-commands/limit.asciidoc[] +include::processing-commands/mv_expand.asciidoc[] +include::processing-commands/rename.asciidoc[] +include::processing-commands/sort.asciidoc[] +include::processing-commands/stats.asciidoc[] +include::processing-commands/where.asciidoc[] diff --git a/docs/reference/esql/esql-enrich-data.asciidoc b/docs/reference/esql/esql-enrich-data.asciidoc new file mode 100644 index 0000000000000..69cc7817d2224 --- /dev/null +++ b/docs/reference/esql/esql-enrich-data.asciidoc @@ -0,0 +1,139 @@ +[[esql-enrich-data]] +=== Data enrichment + +++++ +Data enrichment +++++ + +The {esql} <> processing command combines, at query time, data from +one or more source indices with field-value combinations found in {es} enrich +indices. 
+ +For example, you can use `ENRICH` to: + +* Identify web services or vendors based on known IP addresses +* Add product information to retail orders based on product IDs +* Supplement contact information based on an email address + +[discrete] +[[esql-how-enrich-works]] +==== How the `ENRICH` command works + +The `ENRICH` command adds new columns to a table, with data from {es} indices. +It requires a few special components: + +image::images/esql/esql-enrich.png[align="center"] + + +[[esql-enrich-policy]] +Enrich policy:: ++ +-- +A set of configuration options used to add the right enrich data to the input +table. + +An enrich policy contains: + +include::../ingest/enrich.asciidoc[tag=enrich-policy-fields] + +After <>, it must be +<> before it can be used. Executing an +enrich policy uses data from the policy's source indices to create a streamlined +system index called the _enrich index_. The `ENRICH` command uses this index to +match and enrich an input table. +-- + +[[esql-source-index]] +Source index:: +An index that stores enrich data that the `ENRICH` command can add to input +tables. You can create and manage these indices just like a regular {es} index. +You can use multiple source indices in an enrich policy. You can also use the +same source index in multiple enrich policies. + +[[esql-enrich-index]] +Enrich index:: ++ +-- +A special system index tied to a specific enrich policy. + +Directly matching rows from input tables to documents in source indices could be +slow and resource intensive. To speed things up, the `ENRICH` command uses an +enrich index. + +include::../ingest/enrich.asciidoc[tag=enrich-index] +-- + +[discrete] +[[esql-set-up-enrich-policy]] +==== Set up an enrich policy + +To start using `ENRICH`, follow these steps: + +. Check the <>. +. <>. +. <>. +. <>. +. <>. + +Once you have enrich policies set up, you can <> and <>. + +[IMPORTANT] +==== +The `ENRICH` command performs several operations and may impact the speed of +your query. +==== + +[discrete] +[[esql-enrich-prereqs]] +==== Prerequisites + +include::{es-repo-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich-policy-api-prereqs] + +[discrete] +[[esql-create-enrich-source-index]] +==== Add enrich data + +include::../ingest/enrich.asciidoc[tag=create-enrich-source-index] + +[discrete] +[[esql-create-enrich-policy]] +==== Create an enrich policy + +include::../ingest/enrich.asciidoc[tag=create-enrich-policy] + +[discrete] +[[esql-execute-enrich-policy]] +==== Execute the enrich policy + +include::../ingest/enrich.asciidoc[tag=execute-enrich-policy1] + +image::images/esql/esql-enrich-policy.png[align="center"] + +include::../ingest/enrich.asciidoc[tag=execute-enrich-policy2] + +[discrete] +[[esql-use-enrich]] +==== Use the enrich policy + +After the policy has been executed, you can use the <> to enrich your data. 
+ +image::images/esql/esql-enrich-command.png[align="center",width=50%] + +include::processing-commands/enrich.asciidoc[tag=examples] + +[discrete] +[[esql-update-enrich-data]] +==== Update an enrich index + +include::{es-repo-dir}/ingest/apis/enrich/execute-enrich-policy.asciidoc[tag=update-enrich-index] + +[discrete] +[[esql-update-enrich-policies]] +==== Update an enrich policy + +include::../ingest/enrich.asciidoc[tag=update-enrich-policy] diff --git a/docs/reference/esql/esql-examples.asciidoc b/docs/reference/esql/esql-examples.asciidoc new file mode 100644 index 0000000000000..569dcf1172b38 --- /dev/null +++ b/docs/reference/esql/esql-examples.asciidoc @@ -0,0 +1,99 @@ +[[esql-examples]] +== {esql} examples + +++++ +Examples +++++ + + +[discrete] +=== Aggregating and enriching Windows event logs + +[source,esql] +---- +FROM logs-* +| WHERE event.code IS NOT NULL +| STATS event_code_count = count(event.code) by event.code,host.name +| ENRICH win_events on event.code with event_description +| WHERE event_description IS NOT NULL and host.name IS NOT NULL +| RENAME event_description as event.description +| SORT event_code_count desc +| KEEP event_code_count,event.code,host.name,event.description +---- + +* Queries logs from indices that match the pattern "logs-*". +* Filters events where the "event.code" field is not null. +* Aggregates the count of events by "event.code" and "host.name." +* Enriches the events with additional information using the "event_description" field. +* Filters out events where "event_description" or "host.name" is null. +* Renames "event_description" as "event.description." +* Sorts the result by "event_code_count" in descending order. +* Keeps only selected fields: "event_code_count," "event.code," "host.name," and "event.description." + + +[discrete] +=== Summing outbound traffic from a process `curl.exe` + +[source,esql] +---- +FROM logs-endpoint +| WHERE process.name == "curl.exe" +| STATS bytes = SUM(destination.bytes) BY destination.address +| EVAL kb = bytes/1024 +| SORT kb desc +| LIMIT 10 +| KEEP kb,destination.address +---- + +* Queries logs from the "logs-endpoint" source. +* Filters events where the "process.name" field is "curl.exe." +* Calculates the sum of bytes sent to destination addresses and converts it to kilobytes (KB). +* Sorts the results by "kb" (kilobytes) in descending order. +* Limits the output to the top 10 results. +* Keeps only the "kb" and "destination.address" fields. + + +[discrete] +=== Manipulating DNS logs to find a high number of unique DNS queries per registered domain + +[source,esql] +---- +FROM logs-* +| GROK dns.question.name "%{DATA}\\.%{GREEDYDATA:dns.question.registered_domain:string}" +| STATS unique_queries = count_distinct(dns.question.name) by dns.question.registered_domain, process.name +| WHERE unique_queries > 10 +| SORT unique_queries DESC +| RENAME unique_queries AS `Unique Queries`, dns.question.registered_domain AS `Registered Domain`, process.name AS `Process` +---- + +* Queries logs from indices matching "logs-*." +* Uses a grok pattern to extract the registered domain from the "dns.question.name" field. +* Calculates the count of unique DNS queries per registered domain and process name. +* Filters results where "unique_queries" are greater than 10. +* Sorts the results by "unique_queries" in descending order. +* Renames fields for clarity: "unique_queries" to "Unique Queries," "dns.question.registered_domain" to "Registered Domain," and "process.name" to "Process." 
+ + +[discrete] +=== Identifying high numbers of outbound user connections + +[source,esql] +---- +FROM logs-* +| WHERE NOT CIDR_MATCH(destination.ip, "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16") +| STATS destcount = COUNT(destination.ip) BY user.name, host.name +| ENRICH ldap_lookup_new ON user.name +| WHERE group.name IS NOT NULL +| EVAL follow_up = CASE(destcount >= 100, "true","false") +| SORT destcount desc +| KEEP destcount, host.name, user.name, group.name, follow_up +---- + +* Queries logs from indices matching "logs-*." +* Filters out events where the destination IP address falls within private IP address ranges (e.g., 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16). +* Calculates the count of destination IPs by "user.name" and "host.name." +* Enriches the "user.name" field with LDAP group information. +* Keeps only results where "group.name" is not null. +* Uses a "CASE" statement to create a "follow_up" field, setting it to "true" when "destcount" is greater than or equal to 100 and "false" otherwise. +* Sorts the results by "destcount" in descending order. +* Keeps selected fields: "destcount," "host.name," "user.name," "group.name," and "follow_up." diff --git a/docs/reference/esql/esql-functions-operators.asciidoc b/docs/reference/esql/esql-functions-operators.asciidoc index 279c273278eb9..375bb4ee9dd00 100644 --- a/docs/reference/esql/esql-functions-operators.asciidoc +++ b/docs/reference/esql/esql-functions-operators.asciidoc @@ -1,36 +1,37 @@ [[esql-functions-operators]] -== {esql} functions and operators +=== {esql} functions and operators ++++ Functions and operators ++++ -{esql} provides a comprehensive set of functions and operators: +{esql} provides a comprehensive set of functions and operators for working with data. +The functions are divided into the following categories: [[esql-functions]] <>:: -include::functions/aggregation-functions.asciidoc[tag=list] +include::functions/aggregation-functions.asciidoc[tag=agg_list] <>:: -include::functions/math-functions.asciidoc[tag=list] +include::functions/math-functions.asciidoc[tag=math_list] <>:: -include::functions/string-functions.asciidoc[tag=list] +include::functions/string-functions.asciidoc[tag=string_list] <>:: -include::functions/date-time-functions.asciidoc[tag=list] +include::functions/date-time-functions.asciidoc[tag=date_list] <>:: -include::functions/type-conversion-functions.asciidoc[tag=list] +include::functions/type-conversion-functions.asciidoc[tag=type_list] <>:: -include::functions/conditional-functions-and-expressions.asciidoc[tag=list] +include::functions/conditional-functions-and-expressions.asciidoc[tag=cond_list] <>:: -include::functions/mv-functions.asciidoc[tag=list] +include::functions/mv-functions.asciidoc[tag=mv_list] <>:: -include::functions/operators.asciidoc[tag=list] +include::functions/operators.asciidoc[tag=op_list] include::functions/aggregation-functions.asciidoc[] include::functions/math-functions.asciidoc[] diff --git a/docs/reference/esql/esql-get-started.asciidoc b/docs/reference/esql/esql-get-started.asciidoc index 1f3cdf85c173e..676ad0ca0bf10 100644 --- a/docs/reference/esql/esql-get-started.asciidoc +++ b/docs/reference/esql/esql-get-started.asciidoc @@ -5,4 +5,30 @@ Getting started ++++ -coming::[8.11] \ No newline at end of file +A simple example of an {esql} query is shown below: +[source,esql] +---- +FROM employees +| EVAL age = DATE_DIFF("years", birth_date, NOW()) +| STATS avg_age = AVG(age) BY department +| SORT avg_age DESC +---- + +Each {esql} query starts with a <>. 
A source command produces +a table, typically with data from {es}. + +image::images/esql/source-command.svg[A source command producing a table from {es},align="center"] + +A source command can be followed by one or more +<>. Processing commands change an +input table by adding, removing, or changing rows and columns. +Processing commands can perform filtering, projection, aggregation, and more. + +image::images/esql/processing-command.svg[A processing command changing an input table,align="center"] + +You can chain processing commands, separated by a pipe character: `|`. Each +processing command works on the output table of the previous command. + +image::images/esql/chaining-processing-commands.svg[Processing commands can be chained,align="center"] + +The result of a query is the table produced by the final processing command. diff --git a/docs/reference/esql/esql-kibana.asciidoc b/docs/reference/esql/esql-kibana.asciidoc index 405d8d816ac15..b8709364367b2 100644 --- a/docs/reference/esql/esql-kibana.asciidoc +++ b/docs/reference/esql/esql-kibana.asciidoc @@ -1,8 +1,256 @@ [[esql-kibana]] -== Using {esql} in {kib} +=== Using {esql} in {kib} ++++ -Kibana +Using {esql} in {kib} ++++ -coming::[8.11] \ No newline at end of file +You can use {esql} in {kib} to query and aggregate your data, create +visualizations, and set up alerts. + +This guide shows you how to use {esql} in Kibana. To follow along with the +queries, load the "Sample web logs" sample data set by clicking *Try sample +data* from the {kib} Home, selecting *Other sample data sets*, and clicking *Add +data* on the *Sample web logs* card. + +[discrete] +[[esql-kibana-get-started]] +=== Get started with {esql} + +To get started with {esql} in Discover, open the main menu and select +*Discover*. Next, from the Data views menu, select *Try ES|QL*. + +image::images/esql/esql-data-view-menu.png[align="center",width=33%] + +The ability to select {esql} from the Data views menu can be enabled and +disabled using the `discover:enableESQL` setting from +{kibana-ref}/advanced-options.html[Advanced Settings]. + +[discrete] +[[esql-kibana-query-bar]] +=== The query bar + +After switching to {esql} mode, the query bar shows a sample query. For example: + +[source,esql] +---- +from kibana_sample_data_logs | limit 10 +---- + +Every query starts with a <>. In this query, the +source command is <>. `FROM` retrieves data from data streams, indices, or +aliases. In this example, the data is retrieved from `kibana_sample_data_logs`. + +A source command can be followed by one or more <>. In this query, the processing command is <>. `LIMIT` +limits the number of rows that are retrieved. + +TIP: Click the help icon (image:images/esql/esql-icon-help.svg[]) to open the +in-product reference documentation for all commands and functions. + +To make it easier to write queries, auto-complete offers suggestions with +possible commands and functions: + +image::images/esql/esql-kibana-auto-complete.png[align="center"] + +[NOTE] +==== +{esql} keywords are case-insensitive. The following query is identical to the +previous one: + +[source,esql] +---- +FROM kibana_sample_data_logs | LIMIT 10 +---- +==== + +[discrete] +==== Expand the query bar + +For readability, you can put each processing command on a new line. 
The +following query is identical to the previous one: + +[source,esql] +---- +FROM kibana_sample_data_logs +| LIMIT 10 +---- + +To make it easier to write multi-line queries, click the double-headed arrow +button (image:images/esql/esql-icon-expand-query-bar.svg[]) to expand the query +bar: + +image::images/esql/esql-expanded-query-bar.png[align="center"] + +To return to a compact query bar, click the minimize editor button +(image:images/esql/esql-icon-minimize-query-bar.svg[]). + +[discrete] +==== Warnings + +A query may result in warnings, for example when querying an unsupported field +type. When that happens, a warning symbol is shown in the query bar. To see the +detailed warning, expand the query bar, and click *warnings*. + +[discrete] +[[esql-kibana-results-table]] +=== The results table + +For the example query, the results table shows 10 rows. If you omit the `LIMIT` +command, the results table shows up to 500 rows. Using `LIMIT`, you can +increase the limit to up to 10,000 rows. + +NOTE: The 10,000 row limit only applies to the number of rows that are retrieved +by the query and displayed in Discover. Any query or aggregation runs on the +full data set. + +Each row shows two columns for the example query: a column with the `@timestamp` +field and a column with the full document. To display specific fields from the +documents, use the <> command: + +[source,esql] +---- +FROM kibana_sample_data_logs +| KEEP @timestamp, bytes, geo.dest +---- + +To display all fields as separate columns, use `KEEP *`: + +[source,esql] +---- +FROM kibana_sample_data_logs +| KEEP * +---- + +NOTE: The maximum number of columns in Discover is 50. If a query returns more +than 50 columns, Discover only shows the first 50. + +[discrete] +==== Sorting + +To sort on one of the columns, click the column name you want to sort on and +select the sort order. Note that this performs client-side sorting. It only +sorts the rows that were retrieved by the query, which may not be the full +dataset because of the (implicit) limit. To sort the full data set, use the +<> command: + +[source,esql] +---- +FROM kibana_sample_data_logs +| KEEP @timestamp, bytes, geo.dest +| SORT bytes DESC +---- + +[discrete] +[[esql-kibana-time-filter]] +=== Time filtering + +To display data within a specified time range, use the +{kibana-ref}/set-time-filter.html[time filter]. The time filter is only enabled +when the indices you're querying have a field called `@timestamp`. + +If your indices do not have a timestamp field called `@timestamp`, you can limit +the time range using the <> command and the <> function. +For example, if the timestamp field is called `timestamp`, to query the last 15 +minutes of data: +[source,esql] +---- +FROM kibana_sample_data_logs +| WHERE timestamp > NOW() - 15minutes +---- + +[discrete] +[[esql-kibana-visualizations]] +=== Analyze and visualize data + +Between the query bar and the results table, Discover shows a date histogram +visualization. If the indices you're querying do not contain an `@timestamp` +field, the histogram is not shown. + +The visualization adapts to the query. A query's nature determines the type of +visualization. 
For example, this query aggregates the total number of bytes per +destination country: + +[source,esql] +---- +FROM kibana_sample_data_logs +| STATS total_bytes = SUM(bytes) BY geo.dest +| SORT total_bytes DESC +| LIMIT 3 +---- + +The resulting visualization is a bar chart showing the top 3 countries: + +image::images/esql/esql-kibana-bar-chart.png[align="center"] + +To change the visualization into another type, click the visualization type +dropdown: + +image::images/esql/esql-kibana-visualization-type.png[align="center",width=33%] + +To make other changes to the visualization, like the axes and colors, click the +pencil button (image:images/esql/esql-icon-edit-visualization.svg[]). This opens +an in-line editor: + +image::images/esql/esql-kibana-in-line-editor.png[align="center"] + +You can save the visualization to a new or existing dashboard by clicking the +save button (image:images/esql/esql-icon-save-visualization.svg[]). Once saved +to a dashboard, you can continue to make changes to the visualization. Click the +options button in the top-right (image:images/esql/esql-icon-options.svg[]) and +select *Edit ESQL visualization* to open the in-line editor: + +image::images/esql/esql-kibana-edit-on-dashboard.png[align="center"] + +[discrete] +[[esql-kibana-enrich]] +=== Create an enrich policy + +The {esql} <> command enables you to <> +your query dataset with fields from another dataset. Before you can use +`ENRICH`, you need to <>. If a policy exists, it will be suggested by auto-complete. If not, +click *Click to create* to create one. + +image::images/esql/esql-kibana-enrich-autocomplete.png[align="center"] + +Next, you can enter a policy name, the policy type, source indices, and +optionally a query: + +image::images/esql/esql-kibana-enrich-step-1.png[align="center",width="50%"] + +Click *Next* to select the match field and enrich fields: + +image::images/esql/esql-kibana-enrich-step-2.png[align="center",width="50%"] + +Finally, click *Create and execute*. + +Now, you can use the enrich policy in an {esql} query: + +image::images/esql/esql-kibana-enriched-data.png[align="center"] + +[discrete] +[[esql-kibana-alerting-rule]] +=== Create an alerting rule + +You can use {esql} queries to create alerts. From Discover, click *Alerts* and +select *Create search threshold rule*. This opens a panel that enables you to +create a rule using an {esql} query. Next, you can test the query, add a +connector, and save the rule. + +image::images/esql/esql-kibana-create-rule.png[align="center",width=50%] + +[discrete] +[[esql-kibana-limitations]] +=== Limitations + +* The user interface to filter data is not enabled when Discover is in {esql} +mode. To filter data, write a query that uses the <> command +instead, as shown in the example after this list. +* In {esql} mode, clicking a field in the field list in Discover does not show +quick statistics for that field. +* Discover shows no more than 10,000 rows. This limit only applies to the number +of rows that are retrieved by the query and displayed in Discover. Any query or +aggregation runs on the full data set. +* Discover shows no more than 50 columns. If a query returns +more than 50 columns, Discover only shows the first 50.
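+ +For example, instead of a UI filter, you can express the filter as a query. The +following is a minimal sketch that uses fields from the "Sample web logs" data +set (adjust the field names and values to your own data): + +[source,esql] +---- +FROM kibana_sample_data_logs +// keep only the rows that match the filter conditions +| WHERE geo.dest == "US" AND bytes > 1000 +| KEEP @timestamp, bytes, geo.dest +----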
\ No newline at end of file diff --git a/docs/reference/esql/esql-language.asciidoc b/docs/reference/esql/esql-language.asciidoc index 10af5a6e9702c..8ffc0af7cbeb2 100644 --- a/docs/reference/esql/esql-language.asciidoc +++ b/docs/reference/esql/esql-language.asciidoc @@ -1,16 +1,24 @@ [[esql-language]] -== {esql} language +== Learning {esql} ++++ -{esql} language +Learning {esql} ++++ Detailed information about the {esql} language: * <> -* <> +* <> +* <> * <> +* <> +* <> +* <> include::esql-syntax.asciidoc[] +include::esql-commands.asciidoc[] +include::esql-functions-operators.asciidoc[] +include::metadata-fields.asciidoc[] include::multivalued-fields.asciidoc[] -include::metadata-fields.asciidoc[] \ No newline at end of file +include::esql-process-data-with-dissect-grok.asciidoc[] +include::esql-enrich-data.asciidoc[] \ No newline at end of file diff --git a/docs/reference/esql/esql-limitations.asciidoc b/docs/reference/esql/esql-limitations.asciidoc index f39ff73744276..2fa0b628352b1 100644 --- a/docs/reference/esql/esql-limitations.asciidoc +++ b/docs/reference/esql/esql-limitations.asciidoc @@ -5,11 +5,13 @@ Limitations ++++ +This is work in progress. + [discrete] [[esql-supported-types]] === Supported types -* {esql} currently supports the following <>: +{esql} currently supports the following <>: ** `alias` ** `boolean` @@ -24,9 +26,15 @@ ** `unsigned_long` ** `version` +[discrete] +[[esql-tsdb]] +=== {esql} and time series data + +{esql} does not support time series data streams (TSDS). + [discrete] +[[esql-max-rows]] === 10,000 row maximum A single query will not return more than 10,000 rows, regardless of the -`LIMIT` command's value. \ No newline at end of file +`LIMIT` command's value. diff --git a/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc new file mode 100644 index 0000000000000..43b4a2a15f92f --- /dev/null +++ b/docs/reference/esql/esql-process-data-with-dissect-grok.asciidoc @@ -0,0 +1,258 @@ +[[esql-process-data-with-dissect-and-grok]] +=== Data processing with DISSECT and GROK + +++++ +Data processing with DISSECT and GROK +++++ + +Your data may contain unstructured strings that you want to structure. This +makes it easier to analyze the data. For example, log messages may contain IP +addresses that you want to extract so you can find the most active IP addresses. + +image::images/esql/unstructured-data.png[align="center",width=75%] + +{es} can structure your data at index time or query time. At index time, you can +use the <> and <> ingest +processors, or the {ls} {logstash-ref}/plugins-filters-dissect.html[Dissect] and +{logstash-ref}/plugins-filters-grok.html[Grok] filters. At query time, you can +use the {esql} <> and <> commands. + +[[esql-grok-or-dissect]] +==== `DISSECT` or `GROK`? Or both? + +`DISSECT` works by breaking up a string using a delimiter-based pattern. `GROK` +works similarly, but uses regular expressions. This makes `GROK` more powerful, +but generally also slower. `DISSECT` works well when data is reliably repeated. +`GROK` is a better choice when you really need the power of regular expressions, +for example when the structure of your text varies from row to row. + +You can use both `DISSECT` and `GROK` for hybrid use cases. For example, when a +section of the line is reliably repeated, but the entire line is not. `DISSECT` +can deconstruct the section of the line that is repeated. `GROK` can process the +remaining field values using regular expressions.
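+ +For example (a hypothetical log line, shown only to sketch the idea), `DISSECT` +can strip off the reliably repeated prefix, and `GROK` can then parse the +variable remainder: + +[source,esql] +---- +ROW message = "1.2.3.4 [2023-01-23T12:15:00.000Z] GET /search 200" +// DISSECT extracts the fixed prefix: an IP address and a timestamp +| DISSECT message "%{clientip} [%{@timestamp}] %{rest}" +// GROK parses the variable remainder using regular expressions +| GROK rest "%{WORD:verb} %{NOTSPACE:request} %{NUMBER:response}" +----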
+ +[[esql-process-data-with-dissect]] +==== Process data with `DISSECT` + +The <> processing command matches a string against a +delimiter-based pattern, and extracts the specified keys as columns. + +For example, the following pattern: +[source,txt] +---- +%{clientip} [%{@timestamp}] %{status} +---- + +matches a log line of this format: +[source,txt] +---- +1.2.3.4 [2023-01-23T12:15:00.000Z] Connected +---- + +and results in adding the following columns to the input table: + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +clientip:keyword | @timestamp:keyword | status:keyword +1.2.3.4 | 2023-01-23T12:15:00.000Z | Connected +|=== + +[[esql-dissect-patterns]] +===== Dissect patterns + +include::../ingest/processors/dissect.asciidoc[tag=intro-example-explanation] + +An empty key `%{}` or a <> can be used to +match values, but exclude the value from the output. + +All matched values are output as keyword string data types. Use the +<> to convert to another data type. + +Dissect also supports <> that can +change dissect's default behavior. For example, you can instruct dissect to +ignore certain fields, append fields, skip over padding, etc. + +[[esql-dissect-terminology]] +===== Terminology + +dissect pattern:: +the set of fields and delimiters describing the textual +format. Also known as a dissection. +The dissection is described using a set of `%{}` sections: +`%{a} - %{b} - %{c}` + +field:: +the text from `%{` to `}` inclusive. + +delimiter:: +the text between `}` and the next `%{` characters. +Any set of characters other than `%{`, `'not }'`, or `}` is a delimiter. + +key:: ++ +-- +the text between the `%{` and `}`, exclusive of the `?`, `+`, `&` prefixes +and the ordinal suffix. + +Examples: + +* `%{?aaa}` - the key is `aaa` +* `%{+bbb/3}` - the key is `bbb` +* `%{&ccc}` - the key is `ccc` +-- + +[[esql-dissect-examples]] +===== Examples + +include::processing-commands/dissect.asciidoc[tag=examples] + +[[esql-dissect-key-modifiers]] +===== Dissect key modifiers + +include::../ingest/processors/dissect.asciidoc[tag=dissect-key-modifiers] + +[[esql-dissect-key-modifiers-table]] +.Dissect key modifiers +[options="header",role="styled"] +|====== +| Modifier | Name | Position | Example | Description | Details +| `->` | Skip right padding | (far) right | `%{keyname1->}` | Skips any repeated characters to the right | <> +| `+` | Append | left | `%{+keyname} %{+keyname}` | Appends two or more fields together | <> +| `+` with `/n` | Append with order | left and right | `%{+keyname/2} %{+keyname/1}` | Appends two or more fields together in the order specified | <> +| `?` | Named skip key | left | `%{?ignoreme}` | Skips the matched value in the output. 
Same behavior as `%{}`| <> +| `*` and `&` | Reference keys | left | `%{*r1} %{&r1}` | Sets the output key as value of `*` and output value of `&` | <> +|====== + +[[esql-dissect-modifier-skip-right-padding]] +====== Right padding modifier (`->`) +include::../ingest/processors/dissect.asciidoc[tag=dissect-modifier-skip-right-padding] + +[[esql-append-modifier]] +====== Append modifier (`+`) +include::../ingest/processors/dissect.asciidoc[tag=append-modifier] + +[[esql-append-order-modifier]] +====== Append with order modifier (`+` and `/n`) +include::../ingest/processors/dissect.asciidoc[tag=append-order-modifier] + +[[esql-named-skip-key]] +====== Named skip key (`?`) +include::../ingest/processors/dissect.asciidoc[tag=named-skip-key] + +[[esql-reference-keys]] +====== Reference keys (`*` and `&`) +include::../ingest/processors/dissect.asciidoc[tag=reference-keys] + +[[esql-process-data-with-grok]] +==== Process data with `GROK` + +The <> processing command matches a string against a pattern based on +regular expressions, and extracts the specified keys as columns. + +For example, the following pattern: +[source,txt] +---- +%{IP:ip} \[%{TIMESTAMP_ISO8601:@timestamp}\] %{GREEDYDATA:status} +---- + +matches a log line of this format: +[source,txt] +---- +1.2.3.4 [2023-01-23T12:15:00.000Z] Connected +---- + +and results in adding the following columns to the input table: + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +@timestamp:keyword | ip:keyword | status:keyword +2023-01-23T12:15:00.000Z | 1.2.3.4 | Connected +|=== + +[[esql-grok-patterns]] +===== Grok patterns + +The syntax for a grok pattern is `%{SYNTAX:SEMANTIC}` + +The `SYNTAX` is the name of the pattern that matches your text. For example, +`3.44` is matched by the `NUMBER` pattern and `55.3.244.1` is matched by the +`IP` pattern. The syntax is how you match. + +The `SEMANTIC` is the identifier you give to the piece of text being matched. +For example, `3.44` could be the duration of an event, so you could call it +simply `duration`. Further, a string `55.3.244.1` might identify the `client` +making a request. + +By default, matched values are output as keyword string data types. To convert a +semantic's data type, suffix it with the target data type. For example +`%{NUMBER:num:int}`, which converts the `num` semantic from a string to an +integer. Currently the only supported conversions are `int` and `float`. For +other types, use the <>. + +For an overview of the available patterns, refer to +{es-repo}/blob/{branch}/libs/grok/src/main/resources/patterns[GitHub]. You can +also retrieve a list of all patterns using a <>. + +[[esql-grok-regex]] +===== Regular expressions + +Grok is based on regular expressions. Any regular expressions are valid in grok +as well. Grok uses the Oniguruma regular expression library. Refer to +https://github.com/kkos/oniguruma/blob/master/doc/RE[the Oniguruma GitHub +repository] for the full supported regexp syntax. + +[NOTE] +==== +Special regex characters like `[` and `]` need to be escaped with a `\`. For +example, in the earlier pattern: +[source,txt] +---- +%{IP:ip} \[%{TIMESTAMP_ISO8601:@timestamp}\] %{GREEDYDATA:status} +---- + +In {esql} queries, the backslash character itself is a special character that +needs to be escaped with another `\`. 
For this example, the corresponding {esql} +query becomes: +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=grokWithEscape] +---- +==== + +[[esql-custom-patterns]] +===== Custom patterns + +If grok doesn't have a pattern you need, you can use the Oniguruma syntax for +named capture which lets you match a piece of text and save it as a column: +[source,txt] +---- +(?<field_name>the pattern here) +---- + +For example, postfix logs have a `queue id` that is a 10 or 11-character +hexadecimal value. This can be captured to a column named `queue_id` with: +[source,txt] +---- +(?<queue_id>[0-9A-F]{10,11}) +---- + +[[esql-grok-examples]] +===== Examples + +include::processing-commands/grok.asciidoc[tag=examples] + +[[esql-grok-debugger]] +===== Grok debugger + +To write and debug grok patterns, you can use the +{kibana-ref}/xpack-grokdebugger.html[Grok Debugger]. It provides a UI for +testing patterns against sample data. Under the covers, it uses the same engine +as the `GROK` command. + +[[esql-grok-limitations]] +===== Limitations + +The `GROK` command does not support configuring <>, or <>. The `GROK` command is not +subject to <>. diff --git a/docs/reference/esql/esql-query-api.asciidoc b/docs/reference/esql/esql-query-api.asciidoc index a6c1d3d598332..437871d31a88f 100644 --- a/docs/reference/esql/esql-query-api.asciidoc +++ b/docs/reference/esql/esql-query-api.asciidoc @@ -6,7 +6,20 @@ Returns search results for an <> query. -include::index.asciidoc[tag=esql-query-api-example] +[source,console] +---- +POST /_query +{ + "query": """ + FROM library + | EVAL year = DATE_TRUNC(1 YEARS, release_date) + | STATS MAX(page_count) BY year + | SORT year + | LIMIT 5 + """ +} +---- +// TEST[setup:library] [discrete] [[esql-query-api-request]] diff --git a/docs/reference/esql/esql-rest.asciidoc b/docs/reference/esql/esql-rest.asciidoc index 28ecfb7eea840..2d47f6e46ff65 100644 --- a/docs/reference/esql/esql-rest.asciidoc +++ b/docs/reference/esql/esql-rest.asciidoc @@ -1,5 +1,5 @@ [[esql-rest]] -== {esql} REST API +=== {esql} REST API ++++ REST API @@ -38,13 +38,14 @@ James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z [discrete] [[esql-kibana-console]] -=== Kibana Console +==== Kibana Console If you are using {kibana-ref}/console-kibana.html[Kibana Console] (which is highly recommended), take advantage of the triple quotes `"""` when creating the query. This not only automatically escapes double quotes (`"`) inside the query string but also supports multi-line requests: +// tag::esql-query-api[] [source,console] ---- POST /_query?format=txt @@ -61,7 +62,7 @@ POST /_query?format=txt [discrete] [[esql-rest-format]] -=== Response formats +==== Response formats {esql} can return the data in the following human readable and binary formats. You can set the format by specifying the `format` parameter in the URL or by @@ -120,7 +121,7 @@ Use the `tsv` format instead. [discrete] [[esql-rest-filtering]] -=== Filtering using {es} Query DSL +==== Filtering using {es} Query DSL Specify a Query DSL query in the `filter` parameter to filter the set of documents that an {esql} query runs on. @@ -160,7 +161,7 @@ Douglas Adams |The Hitchhiker's Guide to the Galaxy|180 |1979-10-12T [discrete] [[esql-rest-columnar]] -=== Columnar results +==== Columnar results By default, {esql} returns results as rows. For example, `FROM` returns each individual document as one row.
For the `json`, `yaml`, `cbor` and `smile` @@ -205,7 +206,7 @@ Which returns: [discrete] [[esql-rest-params]] -=== Passing parameters to a query +==== Passing parameters to a query Values, for example for a condition, can be passed to a query "inline", by integrating the value in the query string itself: diff --git a/docs/reference/esql/esql-syntax.asciidoc b/docs/reference/esql/esql-syntax.asciidoc index 805b879ab676e..725b1d3ff1e03 100644 --- a/docs/reference/esql/esql-syntax.asciidoc +++ b/docs/reference/esql/esql-syntax.asciidoc @@ -9,8 +9,8 @@ [[esql-basic-syntax]] === Basic syntax -An {esql} query is composed of a <> followed -by an optional series of <>, +An {esql} query is composed of a <> followed +by an optional series of <>, separated by a pipe character: `|`. For example: [source,esql] diff --git a/docs/reference/esql/esql-using.asciidoc b/docs/reference/esql/esql-using.asciidoc new file mode 100644 index 0000000000000..f586f3a28de5c --- /dev/null +++ b/docs/reference/esql/esql-using.asciidoc @@ -0,0 +1,16 @@ +[[esql-using]] +== Using {esql} + +<>:: +Information about using the <>. + +<>:: +Using {esql} in {kib} to query and aggregate your data, create visualizations, +and set up alerts. + +<>:: +Using the <> to list and cancel {esql} queries. + +include::esql-rest.asciidoc[] +include::esql-kibana.asciidoc[] +include::task-management.asciidoc[] \ No newline at end of file diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc index ca16e07c2565c..bd501ea49f158 100644 --- a/docs/reference/esql/functions/aggregation-functions.asciidoc +++ b/docs/reference/esql/functions/aggregation-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-agg-functions]] -=== {esql} aggregate functions +==== {esql} aggregate functions ++++ Aggregate functions @@ -7,7 +7,7 @@ The <> function supports these aggregate functions: -// tag::list[] +// tag::agg_list[] * <> * <> * <> @@ -17,7 +17,7 @@ The <> function supports these aggregate functions: * <> * <> * <> -// end::list[] +// end::agg_list[] include::avg.asciidoc[] include::count.asciidoc[] diff --git a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc index 692cb19e19562..d835a14856c03 100644 --- a/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc +++ b/docs/reference/esql/functions/conditional-functions-and-expressions.asciidoc @@ -1,19 +1,19 @@ [[esql-conditional-functions-and-expressions]] -=== {esql} conditional functions and expressions +==== {esql} conditional functions and expressions ++++ Conditional functions and expressions ++++ Conditional functions return one of their arguments by evaluating in an if-else -manner. {esql} supports these conditional functions: +manner. 
{esql} supports these conditional functions: -// tag::list[] +// tag::cond_list[] * <> * <> * <> * <> -// end::list[] +// end::cond_list[] include::case.asciidoc[] include::coalesce.asciidoc[] diff --git a/docs/reference/esql/functions/date-time-functions.asciidoc b/docs/reference/esql/functions/date-time-functions.asciidoc index 59f999f1843fc..8ff7b1e974eeb 100644 --- a/docs/reference/esql/functions/date-time-functions.asciidoc +++ b/docs/reference/esql/functions/date-time-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-date-time-functions]] -=== {esql} date-time functions +==== {esql} date-time functions ++++ Date-time functions @@ -7,14 +7,14 @@ {esql} supports these date-time functions: -// tag::list[] +// tag::date_list[] * <> * <> * <> * <> * <> * <> -// end::list[] +// end::date_list[] include::auto_bucket.asciidoc[] include::date_extract.asciidoc[] diff --git a/docs/reference/esql/functions/math-functions.asciidoc b/docs/reference/esql/functions/math-functions.asciidoc index 9338959354d3f..21131ae9074d7 100644 --- a/docs/reference/esql/functions/math-functions.asciidoc +++ b/docs/reference/esql/functions/math-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-math-functions]] -=== {esql} mathematical functions +==== {esql} mathematical functions ++++ Mathematical functions @@ -7,7 +7,7 @@ {esql} supports these mathematical functions: -// tag::list[] +// tag::math_list[] * <> * <> * <> @@ -28,7 +28,7 @@ * <> * <> * <> -// end::list[] +// end::math_list[] include::abs.asciidoc[] include::acos.asciidoc[] diff --git a/docs/reference/esql/functions/mv-functions.asciidoc b/docs/reference/esql/functions/mv-functions.asciidoc index d4f9a07af4ff7..83dbaaadc5c06 100644 --- a/docs/reference/esql/functions/mv-functions.asciidoc +++ b/docs/reference/esql/functions/mv-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-mv-functions]] -=== {esql} multivalue functions +==== {esql} multivalue functions ++++ Multivalue functions @@ -7,7 +7,7 @@ {esql} supports these multivalue functions: -// tag::list[] +// tag::mv_list[] * <> * <> * <> @@ -16,7 +16,7 @@ * <> * <> * <> -// end::list[] +// end::mv_list[] include::mv_avg.asciidoc[] include::mv_concat.asciidoc[] diff --git a/docs/reference/esql/functions/mv_concat.asciidoc b/docs/reference/esql/functions/mv_concat.asciidoc index d4be458455131..a13cef4c1e67a 100644 --- a/docs/reference/esql/functions/mv_concat.asciidoc +++ b/docs/reference/esql/functions/mv_concat.asciidoc @@ -1,6 +1,9 @@ [discrete] [[esql-mv_concat]] === `MV_CONCAT` +[.text-center] +image::esql/functions/signature/mv_concat.svg[Embedded,opts=inline] + Converts a multivalued string field into a single valued field containing the concatenation of all values separated by a delimiter: @@ -24,3 +27,6 @@ include::{esql-specs}/string.csv-spec[tag=mv_concat-to_string] include::{esql-specs}/string.csv-spec[tag=mv_concat-to_string-result] |=== +Supported types: + +include::types/mv_concat.asciidoc[] diff --git a/docs/reference/esql/functions/mv_count.asciidoc b/docs/reference/esql/functions/mv_count.asciidoc index 5bcda53ca5a9b..e6a61cd6e9c63 100644 --- a/docs/reference/esql/functions/mv_count.asciidoc +++ b/docs/reference/esql/functions/mv_count.asciidoc @@ -1,6 +1,9 @@ [discrete] [[esql-mv_count]] === `MV_COUNT` +[.text-center] +image::esql/functions/signature/mv_count.svg[Embedded,opts=inline] + Converts a multivalued field into a single valued field containing a count of the number of values: @@ -13,4 +16,6 @@ include::{esql-specs}/string.csv-spec[tag=mv_count] 
include::{esql-specs}/string.csv-spec[tag=mv_count-result] |=== -NOTE: This function accepts all types and always returns an `integer`. +Supported types: + +include::types/mv_count.asciidoc[] diff --git a/docs/reference/esql/functions/mv_dedupe.asciidoc b/docs/reference/esql/functions/mv_dedupe.asciidoc index c6af3f2d1aa3f..c85c6ddff4354 100644 --- a/docs/reference/esql/functions/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/mv_dedupe.asciidoc @@ -1,6 +1,9 @@ [discrete] [[esql-mv_dedupe]] === `MV_DEDUPE` +[.text-center] +image::esql/functions/signature/mv_dedupe.svg[Embedded,opts=inline] + Removes duplicates from a multivalued field. For example: [source.merge.styled,esql] @@ -12,4 +15,8 @@ include::{esql-specs}/string.csv-spec[tag=mv_dedupe] include::{esql-specs}/string.csv-spec[tag=mv_dedupe-result] |=== +Supported types: + +include::types/mv_dedupe.asciidoc[] + NOTE: `MV_DEDUPE` may, but won't always, sort the values in the field. diff --git a/docs/reference/esql/functions/mv_max.asciidoc b/docs/reference/esql/functions/mv_max.asciidoc index e8ef951f168f5..ed433b64a2813 100644 --- a/docs/reference/esql/functions/mv_max.asciidoc +++ b/docs/reference/esql/functions/mv_max.asciidoc @@ -1,6 +1,9 @@ [discrete] [[esql-mv_max]] === `MV_MAX` +[.text-center] +image::esql/functions/signature/mv_max.svg[Embedded,opts=inline] + Converts a multivalued field into a single valued field containing the maximum value. For example: [source.merge.styled,esql] @@ -23,3 +26,7 @@ include::{esql-specs}/string.csv-spec[tag=mv_max] |=== include::{esql-specs}/string.csv-spec[tag=mv_max-result] |=== + +Supported types: + +include::types/mv_max.asciidoc[] diff --git a/docs/reference/esql/functions/mv_min.asciidoc b/docs/reference/esql/functions/mv_min.asciidoc index 235e5c3c2bb5e..b0c8dd51c97fc 100644 --- a/docs/reference/esql/functions/mv_min.asciidoc +++ b/docs/reference/esql/functions/mv_min.asciidoc @@ -1,6 +1,9 @@ [discrete] [[esql-mv_min]] === `MV_MIN` +[.text-center] +image::esql/functions/signature/mv_min.svg[Embedded,opts=inline] + Converts a multivalued field into a single valued field containing the minimum value. For example: [source.merge.styled,esql] @@ -23,3 +26,7 @@ include::{esql-specs}/string.csv-spec[tag=mv_min] |=== include::{esql-specs}/string.csv-spec[tag=mv_min-result] |=== + +Supported types: + +include::types/mv_min.asciidoc[] diff --git a/docs/reference/esql/functions/operators.asciidoc b/docs/reference/esql/functions/operators.asciidoc index f1698bb2450b7..c236413b5dd7e 100644 --- a/docs/reference/esql/functions/operators.asciidoc +++ b/docs/reference/esql/functions/operators.asciidoc @@ -1,5 +1,5 @@ [[esql-operators]] -=== {esql} operators +==== {esql} operators ++++ Operators @@ -7,7 +7,7 @@ Boolean operators for comparing against one or multiple expressions. -// tag::list[] +// tag::op_list[] * <> * <> * <> @@ -20,7 +20,7 @@ Boolean operators for comparing against one or multiple expressions. 
* <> * <> * <> -// end::list[] +// end::op_list[] include::binary.asciidoc[] include::logical.asciidoc[] diff --git a/docs/reference/esql/functions/signature/auto_bucket.svg b/docs/reference/esql/functions/signature/auto_bucket.svg index 8343661db064e..7da9a053825f1 100644 --- a/docs/reference/esql/functions/signature/auto_bucket.svg +++ b/docs/reference/esql/functions/signature/auto_bucket.svg @@ -1 +1 @@ -AUTO_BUCKET(arg1,arg2,arg3,arg4) \ No newline at end of file +AUTO_BUCKET(field,buckets,from,to) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_concat.svg b/docs/reference/esql/functions/signature/mv_concat.svg index d12153de7241c..ec3a3aa4ae750 100644 --- a/docs/reference/esql/functions/signature/mv_concat.svg +++ b/docs/reference/esql/functions/signature/mv_concat.svg @@ -1 +1 @@ -MV_CONCAT(arg1,arg2) \ No newline at end of file +MV_CONCAT(v,delim) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_count.svg b/docs/reference/esql/functions/signature/mv_count.svg index 23d1f3a9f5bea..48e60f26e394d 100644 --- a/docs/reference/esql/functions/signature/mv_count.svg +++ b/docs/reference/esql/functions/signature/mv_count.svg @@ -1 +1 @@ -MV_COUNT(arg1) \ No newline at end of file +MV_COUNT(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_dedupe.svg b/docs/reference/esql/functions/signature/mv_dedupe.svg index 460dcae11e46c..92be3210ce895 100644 --- a/docs/reference/esql/functions/signature/mv_dedupe.svg +++ b/docs/reference/esql/functions/signature/mv_dedupe.svg @@ -1 +1 @@ -MV_DEDUPE(arg1) \ No newline at end of file +MV_DEDUPE(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_max.svg b/docs/reference/esql/functions/signature/mv_max.svg index aec9dbf82a445..6c64809be0720 100644 --- a/docs/reference/esql/functions/signature/mv_max.svg +++ b/docs/reference/esql/functions/signature/mv_max.svg @@ -1 +1 @@ -MV_MAX(arg1) \ No newline at end of file +MV_MAX(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/signature/mv_min.svg b/docs/reference/esql/functions/signature/mv_min.svg index 386057b5aa287..c6ef5e30c289c 100644 --- a/docs/reference/esql/functions/signature/mv_min.svg +++ b/docs/reference/esql/functions/signature/mv_min.svg @@ -1 +1 @@ -MV_MIN(arg1) \ No newline at end of file +MV_MIN(v) \ No newline at end of file diff --git a/docs/reference/esql/functions/string-functions.asciidoc b/docs/reference/esql/functions/string-functions.asciidoc index af77d4a08bff2..b209244b93297 100644 --- a/docs/reference/esql/functions/string-functions.asciidoc +++ b/docs/reference/esql/functions/string-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-string-functions]] -=== {esql} string functions +==== {esql} string functions ++++ String functions @@ -7,7 +7,7 @@ {esql} supports these string functions: -// tag::list[] +// tag::string_list[] * <> * <> * <> @@ -18,7 +18,7 @@ * <> * <> * <> -// end::list[] +// end::string_list[] include::concat.asciidoc[] include::left.asciidoc[] diff --git a/docs/reference/esql/functions/type-conversion-functions.asciidoc b/docs/reference/esql/functions/type-conversion-functions.asciidoc index c24cf6685b84c..640006c936526 100644 --- a/docs/reference/esql/functions/type-conversion-functions.asciidoc +++ b/docs/reference/esql/functions/type-conversion-functions.asciidoc @@ -1,5 +1,5 @@ [[esql-type-conversion-functions]] -=== {esql} type conversion functions +==== {esql} type conversion functions ++++ Type conversion 
functions @@ -7,7 +7,7 @@ {esql} supports these type conversion functions: -// tag::list[] +// tag::type_list[] * <> * <> * <> @@ -19,7 +19,7 @@ * <> * <> * <> -// end::list[] +// end::type_list[] include::to_boolean.asciidoc[] include::to_datetime.asciidoc[] diff --git a/docs/reference/esql/functions/types/auto_bucket.asciidoc b/docs/reference/esql/functions/types/auto_bucket.asciidoc index d2f134b99fbb0..e0ede29e40df1 100644 --- a/docs/reference/esql/functions/types/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/types/auto_bucket.asciidoc @@ -1,5 +1,5 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | arg3 | arg4 | result +field | buckets | from | to | result |=== diff --git a/docs/reference/esql/functions/types/mv_concat.asciidoc b/docs/reference/esql/functions/types/mv_concat.asciidoc index 2836799f335e8..e3ea8b0830f47 100644 --- a/docs/reference/esql/functions/types/mv_concat.asciidoc +++ b/docs/reference/esql/functions/types/mv_concat.asciidoc @@ -1,6 +1,6 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | arg2 | result +v | delim | result keyword | keyword | keyword keyword | text | keyword text | keyword | keyword diff --git a/docs/reference/esql/functions/types/mv_count.asciidoc b/docs/reference/esql/functions/types/mv_count.asciidoc index 2fcdfc65fa63b..21794bcb1b959 100644 --- a/docs/reference/esql/functions/types/mv_count.asciidoc +++ b/docs/reference/esql/functions/types/mv_count.asciidoc @@ -1,10 +1,14 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result boolean | integer +datetime | integer double | integer integer | integer +ip | integer keyword | integer long | integer +text | integer unsigned_long | integer +version | integer |=== diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 4e12c68422662..dc1175ccdd951 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -1,9 +1,13 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result boolean | boolean +datetime | datetime double | double integer | integer +ip | ip keyword | keyword long | long +text | text +version | version |=== diff --git a/docs/reference/esql/functions/types/mv_max.asciidoc b/docs/reference/esql/functions/types/mv_max.asciidoc index 50740a71e4b49..1a9a1bee08388 100644 --- a/docs/reference/esql/functions/types/mv_max.asciidoc +++ b/docs/reference/esql/functions/types/mv_max.asciidoc @@ -1,10 +1,14 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result boolean | boolean +datetime | datetime double | double integer | integer +ip | ip keyword | keyword long | long +text | text unsigned_long | unsigned_long +version | version |=== diff --git a/docs/reference/esql/functions/types/mv_min.asciidoc b/docs/reference/esql/functions/types/mv_min.asciidoc index 50740a71e4b49..1a9a1bee08388 100644 --- a/docs/reference/esql/functions/types/mv_min.asciidoc +++ b/docs/reference/esql/functions/types/mv_min.asciidoc @@ -1,10 +1,14 @@ [%header.monospaced.styled,format=dsv,separator=|] |=== -arg1 | result +v | result boolean | boolean +datetime | datetime double | double integer | integer +ip | ip keyword | keyword long | long +text | text unsigned_long | unsigned_long +version | version |=== diff --git a/docs/reference/esql/index.asciidoc b/docs/reference/esql/index.asciidoc index c164fcc95b7d5..2946f4e61d629 100644 --- 
a/docs/reference/esql/index.asciidoc +++ b/docs/reference/esql/index.asciidoc @@ -5,112 +5,74 @@ :esql-specs: {esql-tests}/testFixtures/src/main/resources [partintro] --- preview::[] -The {es} Query Language ({esql}) is a query language that enables the iterative -exploration of data. +The {es} Query Language ({esql}) provides a powerful way to filter, transform, +and analyze data stored in {es}, and in the future in other runtimes. It is +designed to be easy to learn and use, by end users, SRE teams, application +developers, and administrators. -An {esql} query consists of a series of commands, separated by pipes. Each query -starts with a <>. A source command produces -a table, typically with data from {es}. +Users can author {esql} queries to find specific events, perform statistical +analysis, and generate visualizations. It supports a wide range of commands and +functions that enable users to perform various data operations, such as +filtering, aggregation, time-series analysis, and more. -image::images/esql/source-command.svg[A source command producing a table from {es},align="center"] +The {es} Query Language ({esql}) makes use of "pipes" (|) to manipulate and +transform data in a step-by-step fashion. This approach allows users to compose +a series of operations, where the output of one operation becomes the input for +the next, enabling complex data transformations and analysis. -A source command can be followed by one or more -<>. Processing commands change an -input table by adding, removing, or changing rows and columns. +[discrete] +=== The {esql} Compute Engine -image::images/esql/processing-command.svg[A processing command changing an input table,align="center"] +{esql} is more than a language: it represents a significant investment in new +compute capabilities within {es}. To achieve both the functional and performance +requirements for {esql}, it was necessary to build an entirely new compute +architecture. {esql} search, aggregation, and transformation functions are +directly executed within Elasticsearch itself. Query expressions are not +transpiled to Query DSL for execution. This approach allows {esql} to be +extremely performant and versatile. -You can chain processing commands, separated by a pipe character: `|`. Each -processing command works on the output table of the previous command. +The new {esql} execution engine was designed with performance in mind — it +operates on blocks at a time instead of per row, targets vectorization and cache +locality, and embraces specialization and multi-threading. It is a separate +component from the existing Elasticsearch aggregation framework with different +performance characteristics. -image::images/esql/chaining-processing-commands.svg[Processing commands can be chained,align="center"] +The {esql} documentation is organized in these sections: -The result of a query is the table produced by the final processing command. +<>:: +A tutorial to help you get started with {esql}. 
-[discrete] -[[esql-console]] -=== Run an {esql} query +<>:: -[discrete] -==== The {esql} API - -Use the <> to run an {esql} query: - -// tag::esql-query-api-example[] -[source,console] ----- -POST /_query -{ - "query": """ - FROM library - | EVAL year = DATE_TRUNC(1 YEARS, release_date) - | STATS MAX(page_count) BY year - | SORT year - | LIMIT 5 - """ -} ----- -// TEST[setup:library] -// end::esql-query-api-example[] - -The results come back in rows: - -[source,console-result] ----- -{ - "columns": [ - { "name": "MAX(page_count)", "type": "integer"}, - { "name": "year" , "type": "date"} - ], - "values": [ - [268, "1932-01-01T00:00:00.000Z"], - [224, "1951-01-01T00:00:00.000Z"], - [227, "1953-01-01T00:00:00.000Z"], - [335, "1959-01-01T00:00:00.000Z"], - [604, "1965-01-01T00:00:00.000Z"] - ] -} ----- - -By default, results are returned as JSON. You can return data in other -<> by specifying the `format` parameter in -the URL or by setting the `Accept` or `Content-Type` HTTP header. - -By default, an {esql} query returns up to 500 rows. You can change this using -the <>. The previous query's `LIMIT` command limits -results to 5 rows. The maximum number of returned rows is 10,000 rows, -regardless of the `LIMIT` value. - -[discrete] -==== {kib} +Reference documentation for the <>, +<>, and <>. Information about working with <> and <>. And guidance for +<> and <>. -Use {esql} in Discover to explore a data set. From the data view dropdown, -select *Try {esql}* to get started. +<>:: +An overview of using the <>, <>, and +<>. -NOTE: {esql} queries in Discover and Lens are subject to the time range selected -with the time filter. +<>:: +The current limitations of {esql}. --- +<>:: +A few examples of what you can do with {esql}. include::esql-get-started.asciidoc[] include::esql-language.asciidoc[] -include::esql-commands.asciidoc[] - -include::esql-functions-operators.asciidoc[] - -include::esql-rest.asciidoc[] - -include::esql-kibana.asciidoc[] - -include::task-management.asciidoc[] +include::esql-using.asciidoc[] include::esql-limitations.asciidoc[] +include::esql-examples.asciidoc[] + :esql-tests!: :esql-specs!: diff --git a/docs/reference/esql/metadata-fields.asciidoc b/docs/reference/esql/metadata-fields.asciidoc index c034d4d0dd2b3..eb08ee085de38 100644 --- a/docs/reference/esql/metadata-fields.asciidoc +++ b/docs/reference/esql/metadata-fields.asciidoc @@ -34,11 +34,11 @@ like the other index fields: [source.merge.styled,esql] ---- -include::{esql-specs}/metadata-ignoreCsvTests.csv-spec[tag=multipleIndices] +include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=multipleIndices] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/metadata-ignoreCsvTests.csv-spec[tag=multipleIndices-result] +include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=multipleIndices-result] |=== Also, similar to the index fields, once an aggregation is performed, a @@ -47,9 +47,9 @@ used as grouping field: [source.merge.styled,esql] ---- -include::{esql-specs}/metadata-ignoreCsvTests.csv-spec[tag=metaIndexInAggs] +include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=metaIndexInAggs] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/metadata-ignoreCsvTests.csv-spec[tag=metaIndexInAggs-result] +include::{esql-specs}/metadata-IT_tests_only.csv-spec[tag=metaIndexInAggs-result] |=== diff --git a/docs/reference/esql/processing-commands/dissect.asciidoc b/docs/reference/esql/processing-commands/dissect.asciidoc index e6206615342f7..eca10c201c968
100644 --- a/docs/reference/esql/processing-commands/dissect.asciidoc +++ b/docs/reference/esql/processing-commands/dissect.asciidoc @@ -2,18 +2,58 @@ [[esql-dissect]] === `DISSECT` -`DISSECT` enables you to extract structured data out of a string. `DISSECT` -matches the string against a delimiter-based pattern, and extracts the specified -keys as columns. +**Syntax** -Refer to the <> for the -syntax of dissect patterns. +[source,txt] +---- +DISSECT input "pattern" [ append_separator="<separator>"] +---- + +*Parameters* + +`input`:: +The column that contains the string you want to structure. If the column has +multiple values, `DISSECT` will process each value. + +`pattern`:: +A dissect pattern. + +`append_separator="<separator>"`:: +A string used as the separator between appended values, when using the <>. + +*Description* + +`DISSECT` enables you to <>. `DISSECT` matches the string against a +delimiter-based pattern, and extracts the specified keys as columns. + +Refer to <> for the syntax of dissect patterns. + +*Example* + +// tag::examples[] +The following example parses a string that contains a timestamp, some text, and +an IP address: + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=basicDissect] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=basicDissect-result] +|=== + +By default, `DISSECT` outputs keyword string columns. To convert to another +type, use <>: [source.merge.styled,esql] ---- -include::{esql-specs}/dissect.csv-spec[tag=dissect] +include::{esql-specs}/docs.csv-spec[tag=dissectWithToDatetime] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/dissect.csv-spec[tag=dissect-result] +include::{esql-specs}/docs.csv-spec[tag=dissectWithToDatetime-result] |=== + +// end::examples[] \ No newline at end of file diff --git a/docs/reference/esql/processing-commands/enrich.asciidoc b/docs/reference/esql/processing-commands/enrich.asciidoc index 1aaa95367ff92..df402f3b1bd50 100644 --- a/docs/reference/esql/processing-commands/enrich.asciidoc +++ b/docs/reference/esql/processing-commands/enrich.asciidoc @@ -1,61 +1,101 @@ [discrete] [[esql-enrich]] === `ENRICH` -You can use `ENRICH` to add data from your existing indices to incoming records. -It's similar to <>, but it works at query time. + +**Syntax** + +[source,txt] +---- +ENRICH policy [ON match_field] [WITH [new_name1 = ]field1, [new_name2 = ]field2, ...] +---- + +*Parameters* + +`policy`:: +The name of the enrich policy. You need to <> +and <> the enrich policy first. + +`ON match_field`:: +The match field. `ENRICH` uses its value to look for records in the enrich +index. If not specified, the match will be performed on the column with the same +name as the `match_field` defined in the <>. + +`WITH fieldX`:: +The enrich fields from the enrich index that are added to the result as new +columns. If a column with the same name as the enrich field already exists, the +existing column will be replaced by the new column. If not specified, each of +the enrich fields defined in the policy is added. + +`new_nameX =`:: +Enables you to change the name of the column that's added for each of the enrich +fields. Defaults to the enrich field name. + +*Description* + +`ENRICH` enables you to add data from existing indices as new columns using an +enrich policy. Refer to <> for information about setting up a +policy. + +image::images/esql/esql-enrich.png[align="center"] + +TIP: Before you can use `ENRICH`, you need to <>.
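+ +For example, given an enrich policy named `languages_policy` whose match field +is `language_code`, a minimal sketch looks like this (the `language_name` column +mentioned in the comment is illustrative; the columns that are actually added +depend on the enrich fields your policy defines): + +[source,esql] +---- +ROW language_code = "1" +// looks up the matching record in the enrich index and adds the +// policy's enrich fields as new columns, e.g. language_name +| ENRICH languages_policy +----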
+ +*Examples* + +// tag::examples[] +The following example uses the `languages_policy` enrich policy to add a new +column for each enrich field defined in the policy. The match is performed using +the `match_field` defined in the <> and +requires that the input table has a column with the same name (`language_code` +in this example). `ENRICH` will look for records in the +<> based on the match field value. [source.merge.styled,esql] ---- -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich-result] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich-result] |=== -`ENRICH` requires an <> to be executed. -The enrich policy defines a match field (a key field) and a set of enrich fields. - -`ENRICH` will look for records in the <> based on the match field value. -The matching key in the input dataset can be defined using `ON `; if it's not specified, -the match will be performed on a field with the same name as the match field defined in the <>. +To use a column with a different name than the `match_field` defined in the +policy as the match field, use `ON <column-name>`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_on] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_on-result] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_on-result] |=== - -You can specify which attributes (between those defined as enrich fields in the policy) have to be added to the result, -using `WITH , ...` syntax. +By default, each of the enrich fields defined in the policy is added as a +column. To explicitly select the enrich fields that are added, use +`WITH <field1>, <field2>, ...`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_with] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_with-result] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_with-result] |=== - -Attributes can also be renamed using `WITH new_name=` +You can rename the columns that are added using `WITH new_name=<field1>`: [source.merge.styled,esql] ---- -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_rename] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs-ignoreCsvTests.csv-spec[tag=enrich_rename-result] +include::{esql-specs}/docs-IT_tests_only.csv-spec[tag=enrich_rename-result] |=== - -By default (if no `WITH` is defined), `ENRICH` will add all the enrich fields defined in the <> -to the result. - -In case of name collisions, the newly created fields will override the existing fields. +In case of name collisions, the newly created columns will override existing +columns.
+// end::examples[] diff --git a/docs/reference/esql/processing-commands/esql-processing-commands.asciidoc b/docs/reference/esql/processing-commands/esql-processing-commands.asciidoc deleted file mode 100644 index e075477af3303..0000000000000 --- a/docs/reference/esql/processing-commands/esql-processing-commands.asciidoc +++ /dev/null @@ -1,41 +0,0 @@ -[[esql-processing-commands]] -=== {esql} processing commands - -++++ -Processing commands -++++ - -{esql} processing commands change an input table by adding, removing, or changing -rows and columns. - -image::images/esql/processing-command.svg[A processing command changing an input table,align="center"] - -{esql} supports these processing commands: - -// tag::list[] -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -* <> -// end::list[] - -include::dissect.asciidoc[] -include::drop.asciidoc[] -include::enrich.asciidoc[] -include::eval.asciidoc[] -include::grok.asciidoc[] -include::keep.asciidoc[] -include::limit.asciidoc[] -include::mv_expand.asciidoc[] -include::rename.asciidoc[] -include::sort.asciidoc[] -include::stats.asciidoc[] -include::where.asciidoc[] diff --git a/docs/reference/esql/processing-commands/grok.asciidoc b/docs/reference/esql/processing-commands/grok.asciidoc index 914c13b2320eb..c95fe59f888ce 100644 --- a/docs/reference/esql/processing-commands/grok.asciidoc +++ b/docs/reference/esql/processing-commands/grok.asciidoc @@ -2,20 +2,66 @@ [[esql-grok]] === `GROK` -`GROK` enables you to extract structured data out of a string. `GROK` matches -the string against patterns, based on regular expressions, and extracts the -specified patterns as columns. +**Syntax** -Refer to the <> for the syntax for -of grok patterns. +[source,txt] +---- +GROK input "pattern" +---- + +*Parameters* + +`input`:: +The column that contains the string you want to structure. If the column has +multiple values, `GROK` will process each value. + +`pattern`:: +A grok pattern. + +*Description* + +`GROK` enables you to <>. `GROK` matches the string against patterns, +based on regular expressions, and extracts the specified patterns as columns. + +Refer to <> for the syntax of grok patterns. + +*Examples* + +// tag::examples[] +The following example parses a string that contains a timestamp, an IP address, +an email address, and a number: + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=basicGrok] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=basicGrok-result] +|=== + +By default, `GROK` outputs keyword string columns. `int` and `float` types can +be converted by appending `:type` to the semantics in the pattern. 
For example +`{NUMBER:num:int}`: + +[source.merge.styled,esql] +---- +include::{esql-specs}/docs.csv-spec[tag=grokWithConversionSuffix] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/docs.csv-spec[tag=grokWithConversionSuffix-result] +|=== -For example: +For other type conversions, use <>: [source.merge.styled,esql] ---- -include::{esql-specs}/grok.csv-spec[tag=grok] +include::{esql-specs}/docs.csv-spec[tag=grokWithToDatetime] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/grok.csv-spec[tag=grok-result] +include::{esql-specs}/docs.csv-spec[tag=grokWithToDatetime-result] |=== +// end::examples[] diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index 638782a92e8e6..e0a9bbb52b03e 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -42,4 +42,12 @@ include::{esql-specs}/docs.csv-spec[tag=statsGroupByMultipleValues] The following aggregation functions are supported: -include::../functions/aggregation-functions.asciidoc[tag=list] +include::../functions/aggregation-functions.asciidoc[tag=agg_list] + +NOTE: `STATS` without any groups is much, much faster than adding a group. + +NOTE: Grouping on a single field is currently much more optimized than grouping + on many fields. In some tests we've seen grouping on a single `keyword` + field to be five times faster than grouping on two `keyword` fields. Don't + try to work around this by combining the two fields together with something + like <> and then grouping - that's not going to be faster. diff --git a/docs/reference/esql/source-commands/esql-source-commands.asciidoc b/docs/reference/esql/source-commands/esql-source-commands.asciidoc deleted file mode 100644 index 3a795e617e8a5..0000000000000 --- a/docs/reference/esql/source-commands/esql-source-commands.asciidoc +++ /dev/null @@ -1,22 +0,0 @@ -[[esql-source-commands]] -=== {esql} source commands - -++++ -Source commands -++++ - -An {esql} source command produces a table, typically with data from {es}. - -image::images/esql/source-command.svg[A source command producing a table from {es},align="center"] - -{esql} supports these source commands: - -// tag::list[] -* <> -* <> -* <> -// end::list[] - -include::from.asciidoc[] -include::row.asciidoc[] -include::show.asciidoc[] diff --git a/docs/reference/esql/task-management.asciidoc b/docs/reference/esql/task-management.asciidoc index bc06e70f24bd7..dfaff96123035 100644 --- a/docs/reference/esql/task-management.asciidoc +++ b/docs/reference/esql/task-management.asciidoc @@ -1,11 +1,11 @@ [[esql-task-management]] -== {esql} task management +=== {esql} task management ++++ Task management ++++ -You can get running {esql} queries with the <>: +You can list running {esql} queries with the <>: [source,console,id=esql-task-management-get-all] ---- diff --git a/docs/reference/how-to/disk-usage.asciidoc b/docs/reference/how-to/disk-usage.asciidoc index 6eef9621ed9de..55a7b33f53cb3 100644 --- a/docs/reference/how-to/disk-usage.asciidoc +++ b/docs/reference/how-to/disk-usage.asciidoc @@ -137,5 +137,5 @@ if fields always occur in the same order. === Roll up historical data Keeping older data can be useful for later analysis but is often avoided due to -storage costs.
You can use downsampling to summarize and store historical data +at a fraction of the raw data's storage cost. See <>. diff --git a/docs/reference/ilm/apis/put-lifecycle.asciidoc b/docs/reference/ilm/apis/put-lifecycle.asciidoc index 674f75264662c..6d7c8ea5e297f 100644 --- a/docs/reference/ilm/apis/put-lifecycle.asciidoc +++ b/docs/reference/ilm/apis/put-lifecycle.asciidoc @@ -37,6 +37,11 @@ previous versions. ``:: (Required, string) Identifier for the policy. ++ +[IMPORTANT] +==== +To avoid naming collisions with built-in and Fleet-managed ILM policies, avoid using `@` as part of the id of your own ILM policies. +==== [[ilm-put-lifecycle-query-params]] ==== {api-query-parms-title} diff --git a/docs/reference/images/esql/esql-data-view-menu.png b/docs/reference/images/esql/esql-data-view-menu.png new file mode 100644 index 0000000000000..fbbbdf44d315c Binary files /dev/null and b/docs/reference/images/esql/esql-data-view-menu.png differ diff --git a/docs/reference/images/esql/esql-enrich-command.png b/docs/reference/images/esql/esql-enrich-command.png new file mode 100644 index 0000000000000..934258430d974 Binary files /dev/null and b/docs/reference/images/esql/esql-enrich-command.png differ diff --git a/docs/reference/images/esql/esql-enrich-policy.png b/docs/reference/images/esql/esql-enrich-policy.png new file mode 100644 index 0000000000000..abe55e805f1a4 Binary files /dev/null and b/docs/reference/images/esql/esql-enrich-policy.png differ diff --git a/docs/reference/images/esql/esql-enrich.png b/docs/reference/images/esql/esql-enrich.png new file mode 100644 index 0000000000000..a710c5e543688 Binary files /dev/null and b/docs/reference/images/esql/esql-enrich.png differ diff --git a/docs/reference/images/esql/esql-expanded-query-bar.png b/docs/reference/images/esql/esql-expanded-query-bar.png new file mode 100644 index 0000000000000..1c26d72b86fb9 Binary files /dev/null and b/docs/reference/images/esql/esql-expanded-query-bar.png differ diff --git a/docs/reference/images/esql/esql-icon-edit-visualization.svg b/docs/reference/images/esql/esql-icon-edit-visualization.svg new file mode 100644 index 0000000000000..c559f0e589c72 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-edit-visualization.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-icon-expand-query-bar.svg b/docs/reference/images/esql/esql-icon-expand-query-bar.svg new file mode 100644 index 0000000000000..fc2641318acb2 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-expand-query-bar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-icon-help.svg b/docs/reference/images/esql/esql-icon-help.svg new file mode 100644 index 0000000000000..84c9b8db397c9 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-help.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-icon-minimize-query-bar.svg b/docs/reference/images/esql/esql-icon-minimize-query-bar.svg new file mode 100644 index 0000000000000..54b9349a05d59 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-minimize-query-bar.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-icon-options.svg b/docs/reference/images/esql/esql-icon-options.svg new file mode 100644 index 0000000000000..6abb3ca9d0827 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-options.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-icon-save-visualization.svg 
b/docs/reference/images/esql/esql-icon-save-visualization.svg new file mode 100644 index 0000000000000..f80a5993acde1 --- /dev/null +++ b/docs/reference/images/esql/esql-icon-save-visualization.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/reference/images/esql/esql-kibana-auto-complete.png b/docs/reference/images/esql/esql-kibana-auto-complete.png new file mode 100644 index 0000000000000..5763e569c7668 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-auto-complete.png differ diff --git a/docs/reference/images/esql/esql-kibana-bar-chart.png b/docs/reference/images/esql/esql-kibana-bar-chart.png new file mode 100644 index 0000000000000..43190a34bf3c3 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-bar-chart.png differ diff --git a/docs/reference/images/esql/esql-kibana-create-rule.png b/docs/reference/images/esql/esql-kibana-create-rule.png new file mode 100644 index 0000000000000..c9fb14b0d2ee9 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-create-rule.png differ diff --git a/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png b/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png new file mode 100644 index 0000000000000..cea540e78f4a8 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-edit-on-dashboard.png differ diff --git a/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png b/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png new file mode 100644 index 0000000000000..15b95c650ea88 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-enrich-autocomplete.png differ diff --git a/docs/reference/images/esql/esql-kibana-enrich-step-1.png b/docs/reference/images/esql/esql-kibana-enrich-step-1.png new file mode 100644 index 0000000000000..22f71d987d3be Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-enrich-step-1.png differ diff --git a/docs/reference/images/esql/esql-kibana-enrich-step-2.png b/docs/reference/images/esql/esql-kibana-enrich-step-2.png new file mode 100644 index 0000000000000..c8d7f5c6a05e4 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-enrich-step-2.png differ diff --git a/docs/reference/images/esql/esql-kibana-enriched-data.png b/docs/reference/images/esql/esql-kibana-enriched-data.png new file mode 100644 index 0000000000000..85df6a629eca9 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-enriched-data.png differ diff --git a/docs/reference/images/esql/esql-kibana-in-line-editor.png b/docs/reference/images/esql/esql-kibana-in-line-editor.png new file mode 100644 index 0000000000000..14caf02e60ea2 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-in-line-editor.png differ diff --git a/docs/reference/images/esql/esql-kibana-visualization-type.png b/docs/reference/images/esql/esql-kibana-visualization-type.png new file mode 100644 index 0000000000000..e36d70725b8f2 Binary files /dev/null and b/docs/reference/images/esql/esql-kibana-visualization-type.png differ diff --git a/docs/reference/images/esql/unstructured-data.png b/docs/reference/images/esql/unstructured-data.png new file mode 100644 index 0000000000000..4753a91a71f67 Binary files /dev/null and b/docs/reference/images/esql/unstructured-data.png differ diff --git a/docs/reference/images/index-mgmt/management-data-stream.png b/docs/reference/images/index-mgmt/management-data-stream.png new file mode 100644 index 0000000000000..01534fdec2a23 Binary files /dev/null and 
b/docs/reference/images/index-mgmt/management-data-stream.png differ diff --git a/docs/reference/images/index-mgmt/management-enrich-policies.png b/docs/reference/images/index-mgmt/management-enrich-policies.png new file mode 100644 index 0000000000000..75911842ae434 Binary files /dev/null and b/docs/reference/images/index-mgmt/management-enrich-policies.png differ diff --git a/docs/reference/images/ingest/document-enrichment-add-inference-pipeline.png b/docs/reference/images/ingest/document-enrichment-add-inference-pipeline.png new file mode 100644 index 0000000000000..ddcf42e24ab83 Binary files /dev/null and b/docs/reference/images/ingest/document-enrichment-add-inference-pipeline.png differ diff --git a/docs/reference/images/ingest/document-enrichment-diagram.png b/docs/reference/images/ingest/document-enrichment-diagram.png new file mode 100644 index 0000000000000..89ae1d45e24d4 Binary files /dev/null and b/docs/reference/images/ingest/document-enrichment-diagram.png differ diff --git a/docs/reference/images/ingest/ingest-pipeline-ent-search-ui.png b/docs/reference/images/ingest/ingest-pipeline-ent-search-ui.png new file mode 100644 index 0000000000000..e0fc7d1ba8ffc Binary files /dev/null and b/docs/reference/images/ingest/ingest-pipeline-ent-search-ui.png differ diff --git a/docs/reference/indices/index-mgmt.asciidoc b/docs/reference/indices/index-mgmt.asciidoc index b89d2ec957af9..7a78f9452b85e 100644 --- a/docs/reference/indices/index-mgmt.asciidoc +++ b/docs/reference/indices/index-mgmt.asciidoc @@ -3,21 +3,15 @@ == Index management in {kib} {kib}'s *Index Management* features are an easy, convenient way to manage your -cluster's indices, <>, and <>. Practicing good index management ensures your data is stored +cluster's indices, <>, <>, and <>. Practicing good index management ensures your data is stored correctly and in the most cost-effective way possible. +To use these features, go to *Stack Management* > *Index Management*. + [discrete] [[index-mgmt-wyl]] -=== What you'll learn - -You'll learn how to: -* View and edit index settings. -* View mappings and statistics for an index. -* Perform index-level operations, such as refreshes. -* View and manage data streams. -* Create index templates to automatically configure new data streams and indices. [discrete] [[index-mgm-req-permissions]] @@ -31,82 +25,68 @@ If you use {es} {security-features}, the following or index's data. * The `manage_index_templates` cluster privilege to manage index templates. -To add these privileges in {kib}, go to *Stack Management > Security > Roles*. +To add these privileges, go to *Stack Management > Security > Roles* or use the <>. [discrete] [[view-edit-indices]] -=== View and edit indices +=== Manage indices -Open {kib}'s main menu and click *Stack Management > Index Management*. +Investigate your indices and perform operations from the *Indices* view. [role="screenshot"] image::images/index-mgmt/management_index_labels.png[Index Management UI] -The *Index Management* page contains an overview of your indices. +* To show details and perform operations such as close, forcemerge, and flush, +click the index name. To perform operations +on multiple indices, select their checkboxes and then open the *Manage* menu. +For more information on managing indices, refer to <>. + +* To filter the list of indices, use the search bar or click a badge. Badges indicate if an index is a <>, a <>, or <>. -Clicking a badge narrows the list to only indices of that type. 
-You can also filter indices using the search bar. - -By clicking the index name, you can open an index details page to investigate the index -<>, <>, and statistics. -On this page, you can also edit the index settings. - -To view and explore the documents within an index, click the *Discover index* button to open {kibana-ref}/discover.html[Discover]. - +* To drill down into the index +<>, <>, and statistics, +click an index name. From this view, you can navigate to *Discover* to +further explore the documents in the index. ++ [role="screenshot"] image::images/index-mgmt/management_index_details.png[Index Management UI] -[float] -=== Perform index-level operations - -Use the *Manage* menu to perform index-level operations. This menu -is available in the index details view, or when you select the checkbox of one or more -indices on the overview page. The menu includes the following actions: - -* <> -* <> -* <> -* <> -* <> -* <> [float] [[manage-data-streams]] === Manage data streams -The *Data Streams* view lists your data streams and lets you examine or delete -them. +Investigate your data streams and address lifecycle management needs in the *Data Streams* view. -To view more information about a data stream, such as its generation or its -current index lifecycle policy, click the stream's name. +The value in the *Indices* column indicates the number of backing indices. Click this number to drill down into details. -To view and explore the data within a data stream, click the compass icon image:compassicon.png[width=3%] next to the data stream name to open {kibana-ref}/discover.html[Discover]. +A value in the data retention column indicates that the data stream is managed by a <>. +This value is the time period for which your data is guaranteed to be stored. Data older than this period can be deleted by +Elasticsearch at a later time. [role="screenshot"] -image::images/index-mgmt/management_index_data_stream_stats.png[Data stream details] +image::images/index-mgmt/management-data-stream.png[Data stream details] -To view information about the stream's backing indices, click the number in the -*Indices* column. +* To view more information about a data stream, such as its generation or its +current index lifecycle policy, click the stream's name. From this view, you can navigate to *Discover* to +further explore data within the data stream. -[role="screenshot"] -image::images/index-mgmt/management_index_data_stream_backing_index.png[Backing index] +* preview:[]To edit the data retention value, open the *Manage* menu, and then click *Edit data retention*. +This action is only available if your data stream is not managed by an ILM policy. [float] [[manage-index-templates]] === Manage index templates -The *Index Templates* view lists your templates and lets you examine, -edit, clone, and delete them. Changes made to an index template do not +Create, +edit, clone, and delete your index templates in the *Index Templates* view. Changes made to an index template do not affect existing indices. [role="screenshot"] image::images/index-mgmt/management-index-templates.png[Index templates] -If you don't have any templates, you can create one using the *Create template* -wizard. - [float] ==== Try it: Create an index template @@ -236,3 +216,32 @@ earlier. 
GET /my-index-000001,my-index-000002 -------------------------------------------------- // TEST[continued] + +[float] +[[manage-enrich-policies]] +=== Manage enrich policies + +Use the *Enrich Policies* view to add data from your existing indices to incoming documents during ingest. +An enrich policy contains: + +* The policy type that determines how the policy matches the enrich data to incoming documents +* The source indices that store enrich data as documents +* The fields from the source indices used to match incoming documents +* The enrich fields containing enrich data from the source indices that you want to add to incoming documents +* An optional <>. + +[role="screenshot"] +image::images/index-mgmt/management-enrich-policies.png[Enrich policies] + + +When creating an enrich policy, the UI walks you through the configuration and field selection. +Before you can use the policy with an enrich processor or {esql} query, you must execute the policy. + +When executed, an enrich policy uses enrich data from the policy’s source indices +to create a streamlined system index called the enrich index. The policy uses this index to match and enrich incoming documents. + +Check out these examples: + +* <> +* <> +* <> diff --git a/docs/reference/indices/index-templates.asciidoc b/docs/reference/indices/index-templates.asciidoc index c1903cf13654b..538fb5b97860a 100644 --- a/docs/reference/indices/index-templates.asciidoc +++ b/docs/reference/indices/index-templates.asciidoc @@ -28,7 +28,7 @@ applied. template, the settings from the <> request take precedence over settings specified in the index template and its component templates. -* Settings specified in the index template itself take precedence over the settings +* Settings specified in the index template itself take precedence over the settings in its component templates. * If a new data stream or index matches more than one index template, the index template with the highest priority is used. @@ -65,6 +65,8 @@ For example, if you don't use {fleet} or {agent} and want to create a template for the `logs-*` index pattern, assign your template a priority of `500`. This ensures your template is applied instead of the built-in template for `logs-*-*`. + +- To avoid naming collisions with built-in and Fleet-managed index templates, avoid using `@` as part of the name of your own index templates. **** [discrete] diff --git a/docs/reference/indices/put-component-template.asciidoc b/docs/reference/indices/put-component-template.asciidoc index 35b42f94640be..794f01cb7f3ae 100644 --- a/docs/reference/indices/put-component-template.asciidoc +++ b/docs/reference/indices/put-component-template.asciidoc @@ -91,23 +91,28 @@ Name of the component template to create. {es} includes the following built-in component templates: // tag::built-in-component-templates[] -- `logs-mappings` -- `logs-settings` -- `metrics-mappings` -- `metrics-settings` -- `metrics-tsdb-settings` -- `synthetics-mapping` -- `synthetics-settings` +- `logs@mappings` +- `logs@settings` +- `metrics@mappings` +- `metrics@settings` +- `metrics@tsdb-settings` +- `synthetics@mapping` +- `synthetics@settings` // end::built-in-component-templates[] {fleet-guide}/fleet-overview.html[{agent}] uses these templates to configure -backing indices for its data streams. If you use {agent} and want to overwrite -one of these templates, set the `version` for your replacement template higher -than the current version.
- -If you don't use {agent} and want to disable all built-in component and index -templates, set <> to `false` -using the <>. +backing indices for its data streams. +If you want to customize these templates, don't override them, as they may be reset after an update. +Instead, look for a `*@custom` component template in the `composed_of` section of the managed index template. +These custom component templates allow you to customize the mappings of managed index templates, +without having to override managed index templates or component templates. +Note that the custom component templates may not exist yet. +After you create them using the <>, they'll be picked up by the index template. +See <> for how to apply the changes to the corresponding data stream. + +To avoid naming collisions with built-in and Fleet-managed component templates, +avoid using `@` as part of your own component template names. +The exception to this rule is the `*@custom` component templates, which let you safely customize managed index templates. ==== [[put-component-template-api-query-params]] diff --git a/docs/reference/ingest.asciidoc b/docs/reference/ingest.asciidoc index ddba7c4e775ce..e2b4cf6fa10db 100644 --- a/docs/reference/ingest.asciidoc +++ b/docs/reference/ingest.asciidoc @@ -307,13 +307,13 @@ matches these templates to your {fleet} data streams based on the {fleet-guide}/data-streams.html#data-streams-naming-scheme[stream's naming scheme]. -Each default integration pipeline calls a nonexistent, unversioned `@custom` ingest pipeline. +Each default integration pipeline calls a nonexistent, unversioned `*@custom` ingest pipeline. If unaltered, this pipeline call has no effect on your data. However, you can modify this call to create custom pipelines for integrations that persist across upgrades. Refer to {fleet-guide}/data-streams-pipeline-tutorial.html[Tutorial: Transform data with custom ingest pipelines] to learn more. {fleet} doesn't provide a default ingest pipeline for the **Custom logs** integration, -but you can specify a pipeline for this integration using an +but you can specify a pipeline for this integration using an <> or a <>. @@ -990,3 +990,4 @@ GET _nodes/stats/ingest?filter_path=nodes.*.ingest include::ingest/common-log-format-example.asciidoc[] include::ingest/enrich.asciidoc[] include::ingest/processors.asciidoc[] +include::ingest/search-ingest-pipelines.asciidoc[] diff --git a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc index c12f2f12733fd..ee3af9c21de8f 100644 --- a/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/execute-enrich-policy.asciidoc @@ -77,8 +77,9 @@ and are <>. [WARNING] ==== -Enrich indices should be used by the <> only. -Avoid using enrich indices for other purposes. +Enrich indices should only be used by the <> +or the <>. Avoid using enrich indices for +other purposes.
==== // end::execute-enrich-policy-def[] diff --git a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc index 5981bf9f6d7f7..ee33b0b320905 100644 --- a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc @@ -53,7 +53,7 @@ DELETE /_enrich/policy/my-policy ==== {api-prereq-title} // tag::enrich-policy-api-prereqs[] -If you use {es} {security-features}, you must have: +To use enrich policies, you must have: * `read` index privileges for any indices used * The `enrich_user` <> diff --git a/docs/reference/ingest/apis/put-pipeline.asciidoc b/docs/reference/ingest/apis/put-pipeline.asciidoc index 5b73a7803fdda..97c6a176dc256 100644 --- a/docs/reference/ingest/apis/put-pipeline.asciidoc +++ b/docs/reference/ingest/apis/put-pipeline.asciidoc @@ -43,7 +43,13 @@ PUT _ingest/pipeline/my-pipeline-id ``:: (Required, string) ID of the ingest pipeline to create or update. - ++ +[IMPORTANT] +==== +To avoid naming collisions with built-in and Fleet-managed ingest pipelines, avoid using `@` as part of your own ingest pipeline names. +The exception to this rule is the `*@custom` ingest pipelines, which let you safely add a custom pipeline to managed pipelines. +See also <>. +==== [[put-pipeline-api-query-params]] ==== {api-query-parms-title} diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc index 7b87333cbf416..4688b21d37e7d 100644 --- a/docs/reference/ingest/enrich.asciidoc +++ b/docs/reference/ingest/enrich.asciidoc @@ -67,14 +67,16 @@ Directly matching incoming documents to documents in source indices could be slow and resource intensive. To speed things up, the enrich processor uses an enrich index. +// tag::enrich-index[] Enrich indices contain enrich data from source indices but have a few special properties to help streamline them: * They are system indices, meaning they're managed internally by {es} and only - intended for use with enrich processors. + intended for use with enrich processors and the {esql} `ENRICH` command. * They always begin with `.enrich-*`. * They are read-only, meaning you can't directly change them. * They are <> for fast retrieval. +// end::enrich-index[] -- [[enrich-setup]] @@ -115,8 +117,9 @@ include::{es-repo-dir}/ingest/apis/enrich/put-enrich-policy.asciidoc[tag=enrich- [[create-enrich-source-index]] ==== Add enrich data +// tag::create-enrich-source-index[] To begin, add documents to one or more source indices. These documents should -contain the enrich data you eventually want to add to incoming documents. +contain the enrich data you eventually want to add to incoming data. You can manage source indices just like regular {es} indices using the <> and <> APIs. @@ -125,29 +128,38 @@ You also can set up {beats-ref}/getting-started.html[{beats}], such as a {filebeat-ref}/filebeat-installation-configuration.html[{filebeat}], to automatically send and index documents to your source indices. See {beats-ref}/getting-started.html[Getting started with {beats}]. +//end::create-enrich-source-index[] [[create-enrich-policy]] ==== Create an enrich policy +// tag::create-enrich-policy[] After adding enrich data to your source indices, use the -<> to create an enrich policy. +<> or +<> to create an enrich policy. [WARNING] ==== Once created, you can't update or change an enrich policy. See <>.
==== +// end::create-enrich-policy[] [[execute-enrich-policy]] ==== Execute the enrich policy -Once the enrich policy is created, you can execute it using the -<> to create an +// tag::execute-enrich-policy1[] +Once the enrich policy is created, you need to execute it using the +<> or +<> to create an <>. +// end::execute-enrich-policy1[] image::images/ingest/enrich/enrich-policy-index.svg[align="center"] +// tag::execute-enrich-policy2[] include::apis/enrich/execute-enrich-policy.asciidoc[tag=execute-enrich-policy-def] +// end::execute-enrich-policy2[] [[add-enrich-processor]] ==== Add an enrich processor to an ingest pipeline @@ -208,9 +220,10 @@ Instead, you can: . Replace the previous enrich policy with the new enrich policy - in any in-use enrich processors. + in any in-use enrich processors or {esql} queries. -. Use the <> API +. Use the <> API or +<> to delete the previous enrich policy. // end::update-enrich-policy[] diff --git a/docs/reference/ingest/processors/dissect.asciidoc b/docs/reference/ingest/processors/dissect.asciidoc index 0d3389b4195d7..9d408ea150644 100644 --- a/docs/reference/ingest/processors/dissect.asciidoc +++ b/docs/reference/ingest/processors/dissect.asciidoc @@ -44,11 +44,13 @@ and result in a document with the following fields: -------------------------------------------------- // NOTCONSOLE -A dissect pattern is defined by the parts of the string that will be discarded. In the example above the first part -to be discarded is a single space. Dissect finds this space, then assigns the value of `clientip` is everything up +// tag::intro-example-explanation[] +A dissect pattern is defined by the parts of the string that will be discarded. In the previous example, the first part +to be discarded is a single space. Dissect finds this space, then assigns `clientip` the value of everything up until that space. -Later dissect matches the `[` and then `]` and then assigns `@timestamp` to everything in-between `[` and `]`. -Paying special attention the parts of the string to discard will help build successful dissect patterns. +Next, dissect matches the `[` and then `]` and then assigns `@timestamp` to everything in-between `[` and `]`. +Paying special attention to the parts of the string to discard will help build successful dissect patterns. +// end::intro-example-explanation[] Successful matches require all keys in a pattern to have a value. If any of the `%{keyname}` defined in the pattern do not have a value, then an exception is thrown and may be handled by the <> directive. @@ -85,9 +87,11 @@ include::common-options.asciidoc[] [[dissect-key-modifiers]] ==== Dissect key modifiers +// tag::dissect-key-modifiers[] Key modifiers can change the default behavior for dissection. Key modifiers may be found on the left or right of the `%{keyname}`, always inside the `%{` and `}`. For example `%{+keyname ->}` has the append and right padding modifiers. +// end::dissect-key-modifiers[] [[dissect-key-modifiers-table]] .Dissect Key Modifiers @@ -104,6 +108,7 @@ modifiers. [[dissect-modifier-skip-right-padding]] ===== Right padding modifier (`->`) +// tag::dissect-modifier-skip-right-padding[] The algorithm that performs the dissection is very strict in that it requires all characters in the pattern to match the source string.
For example, the pattern `%{fookey} %{barkey}` (1 space) will match the string "foo{nbsp}bar" (1 space), but will not match the string "foo{nbsp}{nbsp}bar" (2 spaces) since the pattern has only 1 space and the @@ -137,10 +142,12 @@ Right padding modifier with empty key example * ts = 1998-08-10T17:15:42,466 * level = WARN |====== +// end::dissect-modifier-skip-right-padding[] [[append-modifier]] ===== Append modifier (`+`) [[dissect-modifier-append-key]] +// tag::append-modifier[] Dissect supports appending two or more results together for the output. Values are appended left to right. An append separator can be specified. In this example the append_separator is defined as a space. @@ -152,10 +159,12 @@ Append modifier example | *Result* a| * name = john jacob jingleheimer schmidt |====== +// end::append-modifier[] [[append-order-modifier]] ===== Append with order modifier (`+` and `/n`) [[dissect-modifier-append-key-with-order]] +// tag::append-order-modifier[] Dissect supports appending two or more results together for the output. Values are appended based on the order defined (`/n`). An append separator can be specified. In this example the append_separator is defined as a comma. @@ -167,10 +176,12 @@ Append with order modifier example | *Result* a| * name = schmidt,john,jingleheimer,jacob |====== +// end::append-order-modifier[] [[named-skip-key]] ===== Named skip key (`?`) [[dissect-modifier-named-skip-key]] +// tag::named-skip-key[] Dissect supports ignoring matches in the final result. This can be done with an empty key `%{}`, but for readability it may be desired to give that empty key a name. @@ -182,10 +193,12 @@ Named skip key modifier example * clientip = 1.2.3.4 * @timestamp = 30/Apr/1998:22:00:52 +0000 |====== +// end::named-skip-key[] [[reference-keys]] ===== Reference keys (`*` and `&`) [[dissect-modifier-reference-keys]] +// tag::reference-keys[] Dissect supports using parsed values as the key/value pairings for the structured content. Imagine a system that partially logs in key/value pairs. Reference keys allow you to maintain that key/value relationship. @@ -199,3 +212,4 @@ Reference key modifier example * ip = 1.2.3.4 * error = REFUSED |====== +// end::reference-keys[] diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index 5f0fedfd7902c..0995e3f643813 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -25,10 +25,13 @@ ingested in the pipeline. include::common-options.asciidoc[] |====== -IMPORTANT: You cannot use the `input_output` field with the `target_field` and +[IMPORTANT] +================================================== +* You cannot use the `input_output` field with the `target_field` and `field_map` fields. For NLP models, use the `input_output` option. For {dfanalytics} models, use the `target_field` and `field_map` option. - +* Each {infer} input field must be a single string, not an array of strings.
+================================================== [discrete] [[inference-input-output-example]] diff --git a/docs/reference/ingest/search-inference-processing.asciidoc b/docs/reference/ingest/search-inference-processing.asciidoc new file mode 100644 index 0000000000000..fad11b28858b7 --- /dev/null +++ b/docs/reference/ingest/search-inference-processing.asciidoc @@ -0,0 +1,187 @@ +[[ingest-pipeline-search-inference]] +=== Inference processing + +When you create an index through the **Content** UI, a set of default ingest pipelines is also created, including an ML inference pipeline. +The <> uses inference processors to analyze fields and enrich documents with the output. +Inference processors use ML trained models, so you need to use a built-in model or {ml-docs}/ml-nlp-deploy-models.html[deploy a trained model in your cluster^] to use this feature. + +This guide focuses on the ML inference pipeline, its use, and how to manage it. + +[IMPORTANT] +==== +This feature is not available at all Elastic subscription levels. +Refer to the Elastic subscriptions pages for https://www.elastic.co/subscriptions/cloud[Elastic Cloud^] and https://www.elastic.co/subscriptions[self-managed] deployments. +==== + +[discrete#ingest-pipeline-search-inference-nlp-use-cases] +==== NLP use cases + +{ml-docs}/ml-nlp-overview.html[Natural Language Processing (NLP)^] allows developers to create rich search experiences that go beyond the standards of lexical search. +A few examples of ways to improve search experiences through the use of NLP models: + +[discrete#ingest-pipeline-search-inference-elser] +==== ELSER text expansion + +Using Elastic's {ml-docs}/ml-nlp-elser.html[ELSER machine learning model^] you can easily incorporate text expansion for your queries. +This works by using ELSER to provide semantic enrichments to your documents upon ingestion, combined with the power of <> to provide automated text expansion at query time. + +[discrete#ingest-pipeline-search-inference-ner] +==== Named entity recognition (NER) + +Most commonly used to detect entities such as People, Places, and Organizations from text, {ml-docs}/ml-nlp-extract-info.html#ml-nlp-ner[NER^] can be used to extract key information from text and group results based on that information. +A sports news media site could use NER to automatically extract names of professional athletes, stadiums, and sports teams in their articles and link to season stats or schedules. + +[discrete#ingest-pipeline-search-inference-text-classification] +==== Text classification + +{ml-docs}/ml-nlp-classify-text.html#ml-nlp-text-classification[Text classification^] is commonly used for sentiment analysis and can be used for similar tasks, such as labeling content as containing hate speech in public forums, or triaging and labeling support tickets so they reach the correct level of escalation automatically. + +[discrete#ingest-pipeline-search-inference-text-embedding] +==== Text embedding + +Analyzing a text field using a {ml-docs}/ml-nlp-search-compare.html#ml-nlp-text-embedding[Text embedding^] model will generate a <> representation of the text. +This array of numeric values encodes the semantic _meaning_ of the text. +Using the same model with a user's search query will produce a vector that can then be used to search, ranking results based on vector similarity - semantic similarity - as opposed to traditional word or text similarity.
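As a rough sketch of how this looks at query time (the index name, field path, and model ID below are hypothetical placeholders, not part of any default setup), a kNN search can embed the query text with the same model and rank results by vector similarity:

[source,console]
----
GET my-index/_search
{
  "knn": {
    "field": "ml.inference.body_content.predicted_value", <1>
    "k": 10,
    "num_candidates": 100,
    "query_vector_builder": {
      "text_embedding": {
        "model_id": "my-text-embedding-model", <2>
        "model_text": "how to renew a passport" <3>
      }
    }
  }
}
----
// TEST[skip:hypothetical example]

<1> A `dense_vector` field populated by the inference pipeline at ingest time.
<2> The same text embedding model that was used to enrich the documents.
<3> The user's query text, embedded at search time.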
+ +A common use case is a user searching FAQs, or a support agent searching a knowledge base, where semantically similar content may be indexed with little similarity in phrasing. + +[discrete#ingest-pipeline-search-inference-nlp-in-enterprise-search] +==== NLP in Content UI + +[discrete#ingest-pipeline-search-inference-overview] +===== Overview of ML inference pipeline + +The diagram below shows how documents are processed during ingestion. + +// Original diagram: https://whimsical.com/ml-in-enterprise-search-ErCetPqrcCPu2QYHvAwrgP@2bsEvpTYSt1Hiuq6UBf68tUWvFiXdzLt6ao +image::../images/ingest/document-enrichment-diagram.png["ML inference pipeline diagram"] + +* Documents are processed by the `my-index-0001` pipeline, which happens automatically when indexing through an Elastic connector or crawler. +* The `_run_ml_inference` field is set to `true` to ensure the ML inference pipeline (`my-index-0001@ml-inference`) is executed. + This field is removed during the ingestion process. +* The inference processor analyzes the `message` field on the document using the `my-positivity-model-id` trained model. + The inference output is stored in the `ml.inference.positivity_prediction` field. +* The resulting enriched document is then indexed into the `my-index-0001` index. +* The `ml.inference.positivity_prediction` field can now be used at query time to search for documents above or below a certain threshold. + +[discrete#ingest-pipeline-search-inference-find-deploy-manage-trained-models] +==== Find, deploy, and manage trained models + +This feature is intended to make it easier to use your ML trained models. +First, you need to figure out which model works best for your data. +Make sure to use a {ml-docs}/ml-nlp-model-ref.html[compatible third party NLP model^]. +Since these are publicly available, it is not possible to fine-tune models before {ml-docs}/ml-nlp-deploy-models.html[deploying them^]. + +Trained models must be available in the current {kibana-ref}/xpack-spaces.html[Kibana Space^] and running in order to use them. +By default, models should be available in all Kibana Spaces that have the *Analytics* > *Machine Learning* feature enabled. +To manage your trained models, use the Kibana UI and navigate to *Stack Management -> Machine Learning -> Trained Models*. +Spaces can be controlled in the **spaces** column. +To stop or start a model, go to the *Machine Learning* tab in the *Analytics* menu of Kibana and click *Trained Models* in the *Model Management* section. + +[NOTE] +========================= +The `monitor_ml` <> is required to manage ML models and ML inference pipelines which use those models. +========================= + +[discrete#ingest-pipeline-search-inference-add-inference-processors] +===== Add inference processors to your ML inference pipeline + +To create the index-specific ML inference pipeline, go to *Search -> Content -> Indices -> -> Pipelines* in the Kibana UI. + +If you only see the `ent-search-generic-ingestion` pipeline, you will need to click *Copy and customize* to create index-specific pipelines. +This will create the `{index_name}@ml-inference` pipeline. + +Once your index-specific ML inference pipeline is ready, you can add inference processors that use your ML trained models. +To add an inference processor to the ML inference pipeline, click the *Add Inference Pipeline* button in the *Machine Learning Inference Pipelines* card.
+ +[role="screenshot"] +image::../images/ingest/document-enrichment-add-inference-pipeline.png["Add Inference Pipeline"] + +Here, you'll be able to: + +1. Choose a name for your pipeline. + - This name will need to be unique across the whole deployment. + If you want this pipeline to be index-specific, we recommend including the name of your index in the pipeline name. +2. Select the ML trained model you want to use. +3. Select one or more source fields as input for the inference processor. + - If there are no source fields available, your index will need a <>. +4. (Optional) Choose a name for your target field. +This is where the output of the inference model will be stored. Changing the default name is only possible if you have a single source field selected. +5. Add the source-target field mapping to the configuration by clicking the *Add* button. +6. Repeat steps 3-5 for each field mapping you want to add. +7. (Optional) Test the pipeline with a sample document. +8. (Optional) Review the pipeline definition before creating it with the *Create pipeline* button. + +[discrete#ingest-pipeline-search-inference-manage-inference-processors] +===== Manage and delete inference processors from your ML inference pipeline + +Inference processors added to your index-specific ML inference pipelines are normal Elasticsearch pipelines. +Once created, each processor will have options to *View in Stack Management* and *Delete Pipeline*. +Deleting an inference processor from within the *Content* UI deletes the pipeline and also removes its reference from your index-specific ML inference pipeline. + +These pipelines can also be viewed, edited, and deleted in Kibana via *Stack Management -> Ingest Pipelines*, just like all other Elasticsearch ingest pipelines. +You may also use the <>. +If you delete any of these pipelines outside of the *Content* UI in Kibana, make sure to edit the ML inference pipelines that reference them. + +[discrete#ingest-pipeline-search-inference-update-mapping] +==== Update mappings to use ML inference pipelines + +After setting up an ML inference pipeline or attaching an existing one, it may be necessary to manually create the field mappings in order to support the referenced trained ML model's output. +This needs to happen before the pipeline is first used to index some documents, otherwise the model output fields could be inferred with the wrong type. + +[NOTE] +==== +This doesn't apply when you're creating a pipeline with the ELSER model, for which the index mappings are automatically updated in the process. +==== + +The required field name and type depends on the configuration of the pipeline and the trained model it uses. +For example, if you configure a `text_embedding` model, select `summary` as a source field, and `ml.inference.summary` as the target field, the inference output will be stored in `ml.inference..predicted_value` as a <> type. +In order to support semantic search on this field, it must be added to the mapping: + +[source,console] +---- +PUT my-index-0001/_mapping +{ + "properties": { + "ml.inference.summary.predicted_value": { <1> + "type": "dense_vector", <2> + "dims": 768, <3> + "index": true, + "similarity": "dot_product" + } + } +} +---- +// NOTCONSOLE +// TEST[skip:TODO] + +<1> The output of the ML model is stored in the configured target field suffixed with `predicted_value`. +<2> Choose a field type that is compatible with the inference output and supports your search use cases. +<3> Set additional properties as necessary. 
+ +[TIP] +==== +You can check the shape of the generated output before indexing any documents while creating the ML inference pipeline under the *Test* tab. +Simply provide a sample document, click *Simulate*, and look for the `ml.inference` object in the results. +==== + +[discrete#ingest-pipeline-search-inference-test-inference-pipeline] +==== Test your ML inference pipeline + +To ensure the ML inference pipeline will be run when ingesting documents, you must make sure the documents you are ingesting have a field named `_run_ml_inference` that is set to `true`, and you must set the pipeline to `{index_name}`. +For connector and crawler indices, this will happen automatically if you've configured the settings appropriately for the pipeline name `{index_name}`. +To manage these settings: + + 1. Go to *Search > Content > Indices > > Pipelines*. + 2. Click on the *Settings* link in the *Ingest Pipelines* card for the `{index_name}` pipeline. + 3. Ensure *ML inference pipelines* is selected. + If it is not, select it and save the changes. + +[discrete#ingest-pipeline-search-inference-learn-more] +==== Learn More + +* See <> for information on the various pipelines that are created. +* Learn about {ml-docs}/ml-nlp-elser.html[ELSER], Elastic's proprietary retrieval model for semantic search with sparse vectors. +* https://huggingface.co/models?library=pytorch&pipeline_tag=token-classification&sort=downloads[NER HuggingFace Models^] +* https://huggingface.co/models?library=pytorch&pipeline_tag=text-classification&sort=downloads[Text Classification HuggingFace Models^] +* https://huggingface.co/models?library=pytorch&pipeline_tag=sentence-similarity&sort=downloads[Text Embedding HuggingFace Models^] diff --git a/docs/reference/ingest/search-ingest-pipelines.asciidoc b/docs/reference/ingest/search-ingest-pipelines.asciidoc new file mode 100644 index 0000000000000..049a74670581d --- /dev/null +++ b/docs/reference/ingest/search-ingest-pipelines.asciidoc @@ -0,0 +1,280 @@ +[[ingest-pipeline-search]] +== Ingest pipelines in Search + +You can manage ingest pipelines through Elasticsearch APIs or Kibana UIs. + +The *Content* UI under *Search* has a set of tools for creating and managing indices optimized for search use cases (non-time-series data). +You can also manage your ingest pipelines in this UI. + +[discrete] +[[ingest-pipeline-search-where]] +=== Find pipelines in Content UI + +To work with ingest pipelines using these UI tools, you'll be using the *Pipelines* tab on your search-optimized Elasticsearch index. + +To find this tab in the Kibana UI: + +1. Go to *Search > Content > Elasticsearch indices*. +2. Select the index you want to work with. For example, `search-my-index`. +3. On the index's overview page, open the *Pipelines* tab. +4. From here, you can follow the instructions to create custom pipelines and set up ML inference pipelines. + +The tab is highlighted in this screenshot: + +[.screenshot] +image::../images/ingest/ingest-pipeline-ent-search-ui.png[align="center"] + +[discrete#ingest-pipeline-search-in-enterprise-search] +=== Overview + +These tools can be particularly helpful by providing a layer of customization and post-processing of documents. +For example: + +* providing consistent extraction of text from binary data types +* ensuring consistent formatting +* providing consistent sanitization steps (removing PII like phone numbers or SSNs) + +It can be a lot of work to set up and manage production-ready pipelines from scratch.
+Considerations such as error handling, conditional execution, sequencing, versioning, and modularization must all be taken into account. + +To this end, when you create indices for search use cases (including {enterprise-search-ref}/crawler.html[Elastic web crawler], {enterprise-search-ref}/connectors.html[Elastic connector], and API indices), each index already has a pipeline set up with several processors that optimize your content for search. + +This pipeline is called `ent-search-generic-ingestion`. +While it is a "managed" pipeline (meaning it should not be tampered with), you can view its details via the Kibana UI or the Elasticsearch API. +You can also <>. + +You can control whether you run some of these processors. +While all features are enabled by default, they are eligible for opt-out. +For {enterprise-search-ref}/crawler.html[Elastic crawler] and {enterprise-search-ref}/connectors.html[Elastic connectors], you can opt out (or back in) per index, and your choices are saved. +For API indices, you can opt out (or back in) by including specific fields in your documents. +<>. + +At the deployment level, you can change the default settings for all new indices. +This will not affect existing indices. + +Each index also provides the capability to easily create index-specific ingest pipelines with customizable processing. +If you need that extra flexibility, you can create a custom pipeline by going to your pipeline settings and choosing to "copy and customize". +This will replace the index's use of `ent-search-generic-ingestion` with 3 newly generated pipelines: + +1. `` +2. `@custom` +3. `@ml-inference` + +Like `ent-search-generic-ingestion`, the first of these is "managed", but the other two can and should be modified to fit your needs. +You can view these pipelines using the platform tools (Kibana UI, Elasticsearch API), and can also +<>. + +[discrete#ingest-pipeline-search-pipeline-settings] +=== Pipeline Settings + +Aside from the pipeline itself, you have a few configuration options which control individual features of the pipelines. + +* **Extract Binary Content** - This controls whether or not binary documents should be processed and any textual content should be extracted. +* **Reduce Whitespace** - This controls whether or not consecutive, leading, and trailing whitespaces should be removed. + This can help to display more content in some search experiences. +* **Run ML Inference** - Only available on index-specific pipelines. + This controls whether or not the optional `@ml-inference` pipeline will be run. + Enabled by default. + +For Elastic web crawler and connectors, you can opt in or out per index. +These settings are stored in Elasticsearch in the `.elastic-connectors` index, in the document that corresponds to the specific index. +These settings can be changed there directly, or through the Kibana UI at *Search > Content > Indices > > Pipelines > Settings*. + +You can also change the deployment-wide defaults. +These settings are stored in the Elasticsearch mapping for `.elastic-connectors` in the `_meta` section. +These settings can be changed there directly, or from the Kibana UI at *Search > Content > Settings* tab. +Changing the deployment-wide defaults will not impact any existing indices; it only changes the defaults applied to newly created indices. +Those defaults can still be overridden by the index-specific settings.
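As a quick sketch of where these defaults live (assuming the default `.elastic-connectors` system index naming described above), you can inspect the deployment-wide defaults by reading the `_meta` section of that index's mapping:

[source,console]
----
GET .elastic-connectors/_mapping?filter_path=*.mappings._meta
----
// TEST[skip:hypothetical example]

The exact structure of the `_meta` section may vary between versions, so treat this as a way to verify what your deployment stores rather than a stable API.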
+ +[discrete#ingest-pipeline-search-pipeline-settings-using-the-api] +==== Using the API + +These settings are not persisted for indices that "Use the API". +Instead, changing these settings will, in real time, change the example cURL request displayed. +Notice that the example document in the cURL request contains three underscore-prefixed fields: + +[source,js] +---- +{ + ... + "_extract_binary_content": true, + "_reduce_whitespace": true, + "_run_ml_inference": true +} +---- +// NOTCONSOLE + +Omitting one of these special fields is the same as specifying it with the value `false`. + +[NOTE] +========================= +You must also specify the pipeline in your indexing request. +This is also shown in the example cURL request. +========================= + +[WARNING] +========================= +If the pipeline is not specified, the underscore-prefixed fields will actually be indexed, and will not impact any processing behaviors. +========================= + +[discrete#ingest-pipeline-search-details] +=== Details + +[discrete#ingest-pipeline-search-details-generic-reference] +==== `ent-search-generic-ingestion` Reference + +You can access this pipeline with the <> or via Kibana's < Ingest Pipelines>> UI. + +[WARNING] +========================= +This pipeline is a "managed" pipeline. +That means that it is not intended to be edited. +Editing/updating this pipeline manually could result in unintended behaviors, or difficulty in upgrading in the future. +If you want to make customizations, we recommend you utilize index-specific pipelines (see below), specifically <@custom` pipeline>>. +========================= + +[discrete#ingest-pipeline-search-details-generic-reference-processors] +===== Processors + +1. `attachment` - this uses the <> processor to convert any binary data stored in a document's `_attachment` field to a nested object of plain text and metadata. +2. `set_body` - this uses the <> processor to copy any plain text extracted from the previous step and persist it on the document in the `body` field. +3. `remove_replacement_chars` - this uses the <> processor to remove characters like "�" from the `body` field. +4. `remove_extra_whitespace` - this uses the <> processor to replace consecutive whitespace characters with single spaces in the `body` field. + While not perfect for every use case (see below for how to disable), this can ensure that search experiences display more content and highlighting and less empty space for your search results. +5. `trim` - this uses the <> processor to remove any remaining leading or trailing whitespace from the `body` field. +6. `remove_meta_fields` - this final step of the pipeline uses the <> processor to remove special fields that may have been used elsewhere in the pipeline, whether as temporary storage or as control flow parameters. + +[discrete#ingest-pipeline-search-details-generic-reference-params] +===== Control flow parameters + +The `ent-search-generic-ingestion` pipeline does not always run all processors. +It utilizes a feature of ingest pipelines to <> based on the contents of each individual document. + +* `_extract_binary_content` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `attachment`, `set_body`, and `remove_replacement_chars` processors. + Note that the document will also need an `_attachment` field populated with base64-encoded binary data in order for the `attachment` processor to have any output. 
+ If the `_extract_binary_content` field is missing or `false` on a source document, these processors will be skipped. +* `_reduce_whitespace` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `remove_extra_whitespace` and `trim` processors. + These processors only apply to the `body` field. + If the `_reduce_whitespace` field is missing or `false` on a source document, these processors will be skipped. + +Crawler, Native Connectors, and Connector Clients will automatically add these control flow parameters based on the settings in the index's Pipeline tab. +To control which settings new indices receive upon creation, see the deployment-wide content settings. +See <>. + +[discrete#ingest-pipeline-search-details-specific] +==== Index-specific ingest pipelines + +In the Kibana UI for your index, click the Pipelines tab, then *Settings > Copy and customize* to quickly generate 3 pipelines that are specific to your index. +These 3 pipelines replace `ent-search-generic-ingestion` for the index. +There is nothing lost in this action, as the `` pipeline is a superset of functionality over the `ent-search-generic-ingestion` pipeline. + +[IMPORTANT] +==== +The "copy and customize" button is not available at all Elastic subscription levels. +Refer to the Elastic subscriptions pages for https://www.elastic.co/subscriptions/cloud[Elastic Cloud^] and https://www.elastic.co/subscriptions[self-managed] deployments. +==== + +[discrete#ingest-pipeline-search-details-specific-reference] +===== `` Reference + +This pipeline looks and behaves a lot like the <>, but with <>. + +[WARNING] +========================= +You should not rename this pipeline. +========================= + +[WARNING] +========================= +This pipeline is a "managed" pipeline. +That means that it is not intended to be edited. +Editing/updating this pipeline manually could result in unintended behaviors, or difficulty in upgrading in the future. +If you want to make customizations, we recommend you utilize <@custom` pipeline>>. +========================= + +[discrete#ingest-pipeline-search-details-specific-reference-processors] +====== Processors + +In addition to the processors inherited from the <>, the index-specific pipeline also defines: + +* `index_ml_inference_pipeline` - this uses the <> processor to run the `@ml-inference` pipeline. + This processor will only be run if the source document includes a `_run_ml_inference` field with the value `true`. +* `index_custom_pipeline` - this uses the <> processor to run the `@custom` pipeline. + +[discrete#ingest-pipeline-search-details-specific-reference-params] +====== Control flow parameters + +Like the `ent-search-generic-ingestion` pipeline, the `` pipeline does not always run all processors. +In addition to the `_extract_binary_content` and `_reduce_whitespace` control flow parameters, the `` pipeline also supports: + +* `_run_ml_inference` - if this field is present and has a value of `true` on a source document, the pipeline will attempt to run the `index_ml_inference_pipeline` processor. + If the `_run_ml_inference` field is missing or `false` on a source document, this processor will be skipped. + +Crawler, Native Connectors, and Connector Clients will automatically add these control flow parameters based on the settings in the index's Pipeline tab. +To control which settings new indices receive upon creation, see the deployment-wide content settings. +See <>.
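For API indices, a minimal sketch of an index request that sets these parameters explicitly (the index and pipeline name `my-index` are placeholders; note that the pipeline must be specified in the request for the parameters to take effect):

[source,console]
----
POST my-index/_doc?pipeline=my-index
{
  "body": "A sample document.",
  "_extract_binary_content": true,
  "_reduce_whitespace": true,
  "_run_ml_inference": false
}
----
// TEST[skip:hypothetical example]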
+ +[discrete#ingest-pipeline-search-details-specific-ml-reference] +===== `@ml-inference` Reference + +This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines tab of your index, or from the *Stack Management > Ingest Pipelines* page. +Unlike the `ent-search-generic-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". + +It's possible to add one or more ML inference pipelines to an index in the *Content* UI. +This pipeline will serve as a container for all of the ML inference pipelines configured for the index. +Each ML inference pipeline added to the index is referenced within `@ml-inference` using a `pipeline` processor. + +[WARNING] +========================= +You should not rename this pipeline. +========================= + +[NOTE] +========================= +The `monitor_ml` Elasticsearch cluster permission is required in order to manage ML models and ML inference pipelines which use those models. +========================= + +[discrete#ingest-pipeline-search-details-specific-custom-reference] +===== `@custom` Reference + +This pipeline is empty to start (no processors), but can be added to via the Kibana UI either through the Pipelines +tab of your index, or from the *Stack Management > Ingest Pipelines* page. +Unlike the `ent-search-generic-ingestion` pipeline and the `` pipeline, this pipeline is NOT "managed". + +You are encouraged to make additions and edits to this pipeline, provided its name remains the same. +This provides a convenient hook from which to add custom processing and transformations for your data. +Be sure to read the <> to see what options are available. + +[WARNING] +========================= +You should not rename this pipeline. +========================= + +[discrete#ingest-pipeline-search-upgrading-notes] +=== Upgrading notes + +.Expand to see upgrading notes +[%collapsible%closed] +============= + +* `app_search_crawler` - Since 8.3, {app-search-crawler} has utilized this pipeline to power its binary content +extraction. + You can read more about this pipeline and its usage in the {app-search-ref}/web-crawler-reference.html#web-crawler-reference-binary-content-extraction[App Search Guide]. + When upgrading from 8.3 to 8.5+, be sure to note any changes that you made to the `app_search_crawler` pipeline. + These changes should be re-applied to each index's `@custom` pipeline in order to ensure a consistent data processing experience. + In 8.5+, the <> is required *in addition* to the configurations mentioned in the {app-search-ref}/web-crawler-reference.html#web-crawler-reference-binary-content-extraction[App Search Guide]. + +* `ent_search_crawler` - Since 8.4, the Elastic web crawler has utilized this pipeline to power its binary content extraction. + You can read more about this pipeline and its usage in the {enterprise-search-ref}/crawler-managing.html#crawler-managing-binary-content[Elastic web crawler Guide]. + When upgrading from 8.4 to 8.5+, be sure to note any changes that you made to the `ent_search_crawler` pipeline. +These changes should be re-applied to each index's `@custom` pipeline in order to ensure a consistent data processing experience. + In 8.5+, the <> is required *in addition* to the configurations mentioned in the {enterprise-search-ref}/crawler-managing.html#crawler-managing-binary-content[Elastic web crawler Guide]. 
+ +* `ent-search-generic-ingestion` - Since 8.5, Native Connectors, Connector Clients, and new (>8.4) Elastic web crawler indices will all make use of this pipeline by default. + You can <> above. + As this pipeline is "managed", any modifications that were made to `app_search_crawler` and/or `ent_search_crawler` should NOT be made to `ent-search-generic-ingestion`. + Instead, if such customizations are desired, you should utilize <>, placing all modifications in the `@custom` pipeline(s). +============= + +include::search-inference-processing.asciidoc[] +include::search-nlp-tutorial.asciidoc[] diff --git a/docs/reference/ingest/search-nlp-tutorial.asciidoc b/docs/reference/ingest/search-nlp-tutorial.asciidoc new file mode 100644 index 0000000000000..d5eacb6951023 --- /dev/null +++ b/docs/reference/ingest/search-nlp-tutorial.asciidoc @@ -0,0 +1,259 @@ +[[nlp-example]] +=== Tutorial: Natural language processing (NLP) +++++ +NLP tutorial +++++ + +This guide focuses on a concrete task: getting a machine learning trained model loaded into Elasticsearch and set up to enrich your documents. + +Elasticsearch supports many different ways to use machine learning models. +In this guide, we will use a trained model to enrich documents at ingest time using ingest pipelines configured within Kibana's *Content* UI. + +In this guide, we'll accomplish the above using the following steps: + +- *Set up a Cloud deployment*: We will use Elastic Cloud to host our deployment, as it makes it easy to scale machine learning nodes. +- *Load a model with Eland*: We will use the Eland Elasticsearch client to import our chosen model into Elasticsearch. +Once we've verified that the model is loaded, we will be able to use it in an ingest pipeline. +- *Set up an ML inference pipeline*: We will create an Elasticsearch index with a predefined mapping and add an inference pipeline. +- *Show enriched results*: We will ingest some data into our index and observe that the pipeline enriches our documents. + +Follow the instructions to load a text classification model and set it up to enrich some photo comment data. +Once you're comfortable with the steps involved, use this guide as a blueprint for working with other machine learning trained models. + +*Table of contents*: + +* <> +* <> +* <> +* <> +* <> +* <> +* <> + +[discrete#nlp-example-cloud-deployment] +==== Create an {ecloud} deployment + +Your deployment will need a machine learning instance to upload and deploy trained models. + +If your team already has an Elastic Cloud deployment, make sure it has at least one machine learning instance. +If it does not, *Edit* your deployment to add capacity. +For this tutorial, we'll need at least 2GB of RAM on a single machine learning instance. + +If your team does not have an Elastic Cloud deployment, start by signing up for a https://cloud.elastic.co/registration[free Elastic Cloud trial^]. +After creating an account, you'll have an active subscription and you'll be prompted to create your first deployment. + +Follow the steps to *Create* a new deployment. +Make sure to add capacity to the *Machine Learning instances* under the *Advanced settings* before creating the deployment. +To simplify scaling, turn on the *Autoscale this deployment* feature. +If you use autoscaling, you should increase the minimum RAM for the machine learning instance. +For this tutorial, we'll need at least 2GB of RAM. +For more details, refer to {cloud}/ec-create-deployment.html[Create a deployment^] in the Elastic Cloud documentation.
+ +Enriching documents using machine learning was introduced in Enterprise Search *8.5.0*, so be sure to use version *8.5.0 or later*. + +[discrete#nlp-example-clone-eland] +==== Clone Eland + +Elastic's https://github.com/elastic/eland[Eland^] tool makes it easy to upload trained models to your deployment via Docker. + +Eland is a specialized Elasticsearch client for exploring and manipulating data, which we can use to upload trained models into Elasticsearch. + +To clone and build Eland using Docker, run the following commands: + +[source,sh] +---- +git clone git@github.com:elastic/eland.git +cd eland +docker build -t elastic/eland . +---- + +[discrete#nlp-example-deploy-model] +==== Deploy the trained model + +Now that you have a deployment and a way to upload models, you will need to choose a trained model that fits your data. +https://huggingface.co/[Hugging Face^] has a large repository of publicly available trained models. +The model you choose will depend on your data and what you would like to do with it. + +For the purposes of this guide, let's say we have a data set of photo comments. +In order to promote a positive atmosphere on our platform, we'd like the first few comments on each photo to be positive comments. +For this task, the https://huggingface.co/distilbert-base-uncased-finetuned-sst-2-english?text=I+like+you.+I+love+you[`distilbert-base-uncased-finetuned-sst-2-english`^] model is a good fit. + +To upload this model to your deployment, you need a few pieces of data: + +- The deployment URL. + You can get this via the *Copy endpoint* link next to *Elasticsearch* on the deployment management screen. + It will look like `https://ml-test.es.us-west1.gcp.cloud.es.io:443`. + Make sure to append the port if it isn't present, as Eland requires the URL to have a scheme, host, and port. + 443 is the default port for HTTPS. +- The username and password for your deployment. + This is displayed one time when the deployment is created. + It will look like `elastic` and `xUjaFNTyycG34tQx5Iq9JIIA`. +- The trained model id. + This comes from Hugging Face. + It will look like `distilbert-base-uncased-finetuned-sst-2-english`. +- The trained model task type. + This is the kind of machine learning task the model is designed to achieve. + It will be one of: `fill_mask`, `ner`, `text_classification`, `text_embedding`, or `zero_shot_classification`. + For our use case, we will use `text_classification`. + +We can now upload our chosen model to Elasticsearch by providing these options to Eland. + +[source,sh] +---- +docker run -it --rm --network host \ + elastic/eland \ + eland_import_hub_model \ + --url https://ml-test.es.us-west1.gcp.cloud.es.io:443 \ + -u elastic -p \ + --hub-model-id distilbert-base-uncased-finetuned-sst-2-english \ + --task-type text_classification \ + --start +---- + +This script should take roughly 2-3 minutes to run. +Once your model has been successfully deployed to your Elastic deployment, navigate to Kibana's *Trained Models* page to verify it is ready. +You can find this page under the *Machine Learning > Analytics* menu, and then *Trained Models > Model Management*. +If you do not see your model in the list, you may need to click *Synchronize your jobs and trained models*. +Your model is now ready to be used. + +[discrete#nlp-example-create-index-and-define-ml-inference-pipeline] +==== Create an index and define an ML inference pipeline + +We are now ready to use Kibana's *Content* UI to enrich our documents with inference data.
+Before we ingest photo comments into Elasticsearch, we will first create an ML inference pipeline. +The pipeline will enrich the incoming photo comments with inference data indicating if the comments are positive. + +Let's say our photo comments look like this when they are uploaded as a document into Elasticsearch: + +[source,js] +---- +{ + "photo_id": "78sdv71-8vdkjaj-knew629-vc8459p", + "body": "your dog is so cute!", + ... +} +---- +// NOTCONSOLE + +We want to run our documents through an inference processor that uses the trained model we uploaded to determine if the comments are positive. +To do this, we first need to set up an Elasticsearch index. + +* From the Kibana home page, start by clicking the Search card. +* Click the button to *Create an Elasticsearch index*. +* Choose to *Use the API* and give your index a name. +It will automatically be prefixed with `search-`. +For this demo, we will name the index `search-photo-comments`. +* After clicking *Create Index*, you will be redirected to the overview page for your new index. + +To configure the ML inference pipeline, we need the index to have an existing field mapping so we can choose which field to analyze. +This can be done via the <> in the Kibana Dev Tools or simply through a cURL command: + +[source,js] +---- +PUT search-photo-comments/_mapping +{ + "properties": { + "photo_id": { "type": "keyword" }, + "body": { "type": "text" } + } +} +---- +// NOTCONSOLE + +Now it's time to create an inference pipeline. + +1. From the overview page for your `search-photo-comments` index in "Search", click the *Pipelines* tab. +By default, Elasticsearch does not create any index-specific ingest pipelines. +2. Because we want to customize these pipelines, we need to *Copy and customize* the `ent-search-generic-ingestion` ingest pipeline. +Find this option above the settings for the `ent-search-generic-ingestion` ingest pipeline. +This will create two new index-specific ingest pipelines. + +Next, we'll add an inference pipeline. + +1. Locate the section *Machine Learning Inference Pipelines*, then select *Add inference pipeline*. +2. Give your inference pipeline a name, select the trained model we uploaded, and select the `body` field to be analyzed. +3. Optionally, choose a field name to store the output. +We'll call it `positivity_result`. + +You can also run example documents through a simulator and review the pipeline before creating it. + +[discrete#nlp-example-index-documents] +==== Index documents + +At this point, everything is ready to enrich documents at index time. + +From the Kibana Dev Console, or simply using a cURL command, we can index a document. +We'll use a `_run_ml_inference` flag to tell the `search-photo-comments` pipeline to run the index-specific ML inference pipeline that we created. +This field will not be indexed in the document. + +[source,js] +---- +POST search-photo-comments/_doc/my-new-doc?pipeline=search-photo-comments +{ + "photo_id": "78sdv71-8vdkjaj-knew629-vc8459p", + "body": "your dog is so cute!", + "_run_ml_inference": true +} +---- +// NOTCONSOLE + +Once the document is indexed, use the API to retrieve it and view the enriched data. + +[source,js] +---- +GET search-photo-comments/_doc/my-new-doc +---- +// NOTCONSOLE + +[source,js] +---- +{ + "_index": "search-photo-comments", + "_id": "_MQggoQBKYghsSwHbDvG", + ... + "_source": { + ... 
+ "photo_id": "78sdv71-8vdkjaj-knew629-vc8459p", + "body": "your dog is so cute!", + "ml": { + "inference": { + "positivity_result": { + "predicted_value": "POSITIVE", + "prediction_probability": 0.9998022925461774, + "model_id": "distilbert-base-uncased-finetuned-sst-2-english" + } + } + } + } +} +---- +// NOTCONSOLE + +The document has new fields with the enriched data. +The `ml.inference.positivity_result` field is an object with the analysis from the machine learning model. +The model we used predicted with 99.98% confidence that the analyzed text is positive. + +From here, we can write search queries to boost on `ml.inference.positivity_result.predicted_value`. +This field will also be stored in a top-level `positivity_result` field if the model was confident enough. + +[discrete#nlp-example-summary] +==== Summary + +In this guide, we covered how to: + +- Set up a deployment on Elastic Cloud with a machine learning instance. +- Deploy a machine learning trained model using the Eland Elasticsearch client. +- Configure an inference pipeline to use the trained model with Elasticsearch. +- Enrich documents with inference results from the trained model at ingest time. +- Query your search engine and sort by `positivity_result`. + +[discrete#nlp-example-learn-more] +==== Learn more + +* {ml-docs}/ml-nlp-model-ref.html[Compatible third party models^] +* {ml-docs}/ml-nlp-overview.html[NLP Overview^] +* https://github.com/elastic/eland#docker[Docker section of Eland readme^] +* {ml-docs}/ml-nlp-deploy-models.html[Deploying a model ML guide^] +* {ml-docs}/ml-nlp-import-model.html#ml-nlp-authentication[Eland Authentication methods^] +* <> +// * <> diff --git a/docs/reference/intro.asciidoc b/docs/reference/intro.asciidoc index 9c9f2774d8f2c..3ea2c96eeaf02 100644 --- a/docs/reference/intro.asciidoc +++ b/docs/reference/intro.asciidoc @@ -261,6 +261,6 @@ secondary clusters are read-only followers. As with any enterprise system, you need tools to secure, manage, and monitor your {es} clusters. Security, monitoring, and administrative features that are integrated into {es} enable you to use {kibana-ref}/introduction.html[{kib}] -as a control center for managing a cluster. Features like <> and <> +as a control center for managing a cluster. Features like <> and <> help you intelligently manage your data over time. diff --git a/docs/reference/landing-page.asciidoc b/docs/reference/landing-page.asciidoc index 489168a443bb1..1ddd0cfa28128 100644 --- a/docs/reference/landing-page.asciidoc +++ b/docs/reference/landing-page.asciidoc @@ -172,7 +172,7 @@ Data management
  • - Roll up or transform your data + Downsampling
  • Snapshot and restore diff --git a/docs/reference/ml/common/apis/get-ml-info.asciidoc b/docs/reference/ml/common/apis/get-ml-info.asciidoc index 48f74314560e6..104375bd641c8 100644 --- a/docs/reference/ml/common/apis/get-ml-info.asciidoc +++ b/docs/reference/ml/common/apis/get-ml-info.asciidoc @@ -120,7 +120,9 @@ This is a possible response: }, "limits" : { "effective_max_model_memory_limit": "28961mb", - "total_ml_memory": "86883mb" + "total_ml_memory": "86883mb", + "total_ml_processors": 16, + "max_single_ml_node_processors": 16 } } ---- @@ -129,3 +131,5 @@ This is a possible response: // TESTRESPONSE[s/"build_hash": "99a07c016d5a73"/"build_hash": "$body.native_code.build_hash"/] // TESTRESPONSE[s/"effective_max_model_memory_limit": "28961mb"/"effective_max_model_memory_limit": "$body.limits.effective_max_model_memory_limit"/] // TESTRESPONSE[s/"total_ml_memory": "86883mb"/"total_ml_memory": "$body.limits.total_ml_memory"/] +// TESTRESPONSE[s/"total_ml_processors": 16/"total_ml_processors": $body.limits.total_ml_processors/] +// TESTRESPONSE[s/"max_single_ml_node_processors": 16/"max_single_ml_node_processors": $body.limits.max_single_ml_node_processors/] diff --git a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc index 85de1fb7d4105..d7201fcf42c0a 100644 --- a/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc +++ b/docs/reference/ml/trained-models/apis/infer-trained-model.asciidoc @@ -57,7 +57,8 @@ Controls the amount of time to wait for {infer} results. Defaults to 10 seconds. (Required, array) An array of objects to pass to the model for inference. The objects should contain the fields matching your configured trained model input. Typically for -NLP models, the field name is `text_field`. +NLP models, the field name is `text_field`. Each {infer} input field specified +in this property must be a single string, not an array of strings. //Begin inference_config `inference_config`:: diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc index fc43177bd74b4..81ac3f6cc4cdc 100644 --- a/docs/reference/modules/discovery/bootstrapping.asciidoc +++ b/docs/reference/modules/discovery/bootstrapping.asciidoc @@ -151,7 +151,8 @@ a separate single-node cluster then you must start again: folder>>. . Configure `discovery.seed_hosts` or `discovery.seed_providers` and other -relevant discovery settings. +relevant discovery settings. Ensure `cluster.initial_master_nodes` is not set +on any node. . Restart the node and verify that it joins the existing cluster rather than forming its own one-node cluster. @@ -171,4 +172,6 @@ relevant discovery settings. . Restart all the nodes and verify that they have formed a single cluster. +. Remove `cluster.initial_master_nodes` from every node's configuration. + **** diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 24b951a46ed9d..c5a3ebb782edd 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -685,3 +685,6 @@ a different index configuration, like the number of primary shards. ===== Allow expensive queries Percolate queries will not be executed if <> is set to false. + +===== Using custom similarities +Percolate queries will not respect any configured <>. They always use the default Lucene similarity.
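To make this limitation concrete, here is the general shape of a percolate search (index and field names are illustrative, following the examples earlier on that page); any `similarity` configured on `my-index` has no effect on how the stored queries are scored:

[source,console]
----
GET /my-index/_search
{
  "query": {
    "percolate": {
      "field": "query",
      "document": {
        "message": "A new bonsai tree in the office"
      }
    }
  }
}
----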
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index b0a1c157c8cd0..f065c2deeae72 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -1931,25 +1931,4 @@ Refer to <>. [role="exclude",id="remote-clusters-privileges"] === Configure roles and users for remote clusters -Refer to <>. - -[role="exclude",id="ingest-pipeline-search"] -=== Ingest pipelines for Search indices - -coming::[8.11.0] - -[role="exclude",id="ingest-pipeline-search-inference"] -=== Inference processing for Search indices - -coming::[8.11.0] - -[id="ingest-pipeline-search-inference-update-mapping"] -==== Update mapping - -coming::[8.11.0] - -[role="exclude",id="nlp-example"] -=== Tutorial: Natural language processing (NLP) - -coming::[8.11.0] - +Refer to <>. \ No newline at end of file diff --git a/docs/reference/rest-api/security/get-service-accounts.asciidoc b/docs/reference/rest-api/security/get-service-accounts.asciidoc index b08e73f789053..526c6e65ccf33 100644 --- a/docs/reference/rest-api/security/get-service-accounts.asciidoc +++ b/docs/reference/rest-api/security/get-service-accounts.asciidoc @@ -91,8 +91,7 @@ GET /_security/service/elastic/fleet-server ], "privileges": [ "read", - "write", - "auto_configure" + "write" ], "allow_restricted_indices": false }, diff --git a/docs/reference/rollup/api-quickref.asciidoc b/docs/reference/rollup/api-quickref.asciidoc index f6975650dc2b8..1676161e5fbcc 100644 --- a/docs/reference/rollup/api-quickref.asciidoc +++ b/docs/reference/rollup/api-quickref.asciidoc @@ -5,10 +5,7 @@ API quick reference ++++ -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] Most rollup endpoints have the following base: diff --git a/docs/reference/rollup/apis/delete-job.asciidoc b/docs/reference/rollup/apis/delete-job.asciidoc index 97e75ff7181df..c563e705039e2 100644 --- a/docs/reference/rollup/apis/delete-job.asciidoc +++ b/docs/reference/rollup/apis/delete-job.asciidoc @@ -6,12 +6,9 @@ Delete {rollup-jobs} ++++ -Deletes an existing {rollup-job}. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Deletes an existing {rollup-job}. [[rollup-delete-job-request]] ==== {api-request-title} diff --git a/docs/reference/rollup/apis/get-job.asciidoc b/docs/reference/rollup/apis/get-job.asciidoc index be308f81d5ab7..fcafbbe95159b 100644 --- a/docs/reference/rollup/apis/get-job.asciidoc +++ b/docs/reference/rollup/apis/get-job.asciidoc @@ -5,12 +5,9 @@ Get job ++++ -Retrieves the configuration, stats, and status of {rollup-jobs}. - -experimental[] +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Retrieves the configuration, stats, and status of {rollup-jobs}. [[rollup-get-job-request]] ==== {api-request-title} @@ -48,7 +45,7 @@ For details about a historical {rollup-job}, the ==== {api-response-body-title} `jobs`:: -(array) An array of {rollup-job} resources. +(array) An array of {rollup-job} resources. 
+ .Properties of {rollup-job} resources [%collapsible%open] diff --git a/docs/reference/rollup/apis/put-job.asciidoc b/docs/reference/rollup/apis/put-job.asciidoc index c5e94a5b6c2f3..2392d7204df3b 100644 --- a/docs/reference/rollup/apis/put-job.asciidoc +++ b/docs/reference/rollup/apis/put-job.asciidoc @@ -6,12 +6,9 @@ Create {rollup-jobs} ++++ -Creates a {rollup-job}. - -experimental[] +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Creates a {rollup-job}. [[rollup-put-job-api-request]] ==== {api-request-title} @@ -93,7 +90,7 @@ documents without a timestamp and a `date_histogram` group. The + .Properties of `date_histogram` [%collapsible%open] -===== +===== `calendar_interval` or `fixed_interval`:::: (Required, <>) The interval of time buckets to be generated when rolling up. For example, `60m` produces 60 minute (hourly) @@ -139,11 +136,11 @@ in `UTC`. //Begin histogram `histogram`::: (Optional, object) The histogram group aggregates one or more numeric fields -into numeric histogram intervals. +into numeric histogram intervals. + .Properties of `histogram` [%collapsible%open] -===== +===== `fields`:::: (Required, array) The set of fields that you wish to build histograms for. All fields specified must be some kind of numeric. Order does not matter. @@ -175,7 +172,7 @@ judicious which high-cardinality fields are included for that reason. + .Properties of `terms` [%collapsible%open] -===== +===== `fields`:::: (Required, string) The set of fields that you wish to collect terms for. This diff --git a/docs/reference/rollup/apis/rollup-caps.asciidoc b/docs/reference/rollup/apis/rollup-caps.asciidoc index 78563047962ca..95f652f6d4415 100644 --- a/docs/reference/rollup/apis/rollup-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-caps.asciidoc @@ -5,14 +5,11 @@ Get rollup caps ++++ +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] + Returns the capabilities of any {rollup-jobs} that have been configured for a specific index or index pattern. -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. - [[rollup-get-rollup-caps-request]] ==== {api-request-title} @@ -43,7 +40,7 @@ can be performed, and where does the data live? ==== {api-path-parms-title} ``:: - (string) Index, indices or index-pattern to return rollup capabilities for. + (string) Index, indices or index-pattern to return rollup capabilities for. `_all` may be used to fetch rollup capabilities from all jobs. diff --git a/docs/reference/rollup/apis/rollup-index-caps.asciidoc b/docs/reference/rollup/apis/rollup-index-caps.asciidoc index 9ca33d2b141de..c5b729f2e52e6 100644 --- a/docs/reference/rollup/apis/rollup-index-caps.asciidoc +++ b/docs/reference/rollup/apis/rollup-index-caps.asciidoc @@ -5,14 +5,11 @@ Get rollup index caps ++++ +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] + Returns the rollup capabilities of all jobs inside of a rollup index (e.g. the index where rollup data is stored). -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. 
- [[rollup-get-rollup-index-caps-request]] ==== {api-request-title} diff --git a/docs/reference/rollup/apis/rollup-search.asciidoc b/docs/reference/rollup/apis/rollup-search.asciidoc index af6be2961afee..491dcc6c38ae2 100644 --- a/docs/reference/rollup/apis/rollup-search.asciidoc +++ b/docs/reference/rollup/apis/rollup-search.asciidoc @@ -5,12 +5,9 @@ Rollup search ++++ -Enables searching rolled-up data using the standard Query DSL. - -experimental[] +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Enables searching rolled-up data using the standard Query DSL. [[rollup-search-request]] ==== {api-request-title} diff --git a/docs/reference/rollup/apis/start-job.asciidoc b/docs/reference/rollup/apis/start-job.asciidoc index b5e1edf3c16d8..c102c26ea5d8e 100644 --- a/docs/reference/rollup/apis/start-job.asciidoc +++ b/docs/reference/rollup/apis/start-job.asciidoc @@ -6,12 +6,9 @@ Start {rollup-jobs} ++++ -Starts an existing, stopped {rollup-job}. - -experimental[] +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Starts an existing, stopped {rollup-job}. [[rollup-start-job-request]] ==== {api-request-title} @@ -36,7 +33,7 @@ to start a job that is already started, nothing happens. ``:: (Required, string) Identifier for the {rollup-job}. - + [[rollup-start-job-response-codes]] ==== {api-response-codes-title} diff --git a/docs/reference/rollup/apis/stop-job.asciidoc b/docs/reference/rollup/apis/stop-job.asciidoc index f714159a1c099..61e561b4ceac9 100644 --- a/docs/reference/rollup/apis/stop-job.asciidoc +++ b/docs/reference/rollup/apis/stop-job.asciidoc @@ -6,12 +6,9 @@ Stop {rollup-jobs} ++++ -Stops an existing, started {rollup-job}. - -experimental[] +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +Stops an existing, started {rollup-job}. [[rollup-stop-job-request]] ==== {api-request-title} @@ -52,7 +49,7 @@ processing and eventually moves the job to `STOPPED`. The timeout simply means the API call itself timed out while waiting for the status change. -- - + `wait_for_completion`:: (Optional, Boolean) If set to `true`, causes the API to block until the indexer state completely stops. If set to `false`, the API returns immediately diff --git a/docs/reference/rollup/index.asciidoc b/docs/reference/rollup/index.asciidoc index 5f5a36d55e627..a4394c3c930fd 100644 --- a/docs/reference/rollup/index.asciidoc +++ b/docs/reference/rollup/index.asciidoc @@ -2,10 +2,7 @@ [[xpack-rollup]] == Rolling up historical data -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] Keeping historical data around for analysis is extremely useful but often avoided due to the financial cost of archiving massive amounts of data. 
Retention periods are thus driven by financial realities rather than by the diff --git a/docs/reference/rollup/overview.asciidoc b/docs/reference/rollup/overview.asciidoc index 3026f7f63b0e4..67a65415c6d60 100644 --- a/docs/reference/rollup/overview.asciidoc +++ b/docs/reference/rollup/overview.asciidoc @@ -5,10 +5,7 @@ Overview ++++ -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] Time-based data (documents that are predominantly identified by their timestamp) often have associated retention policies to manage data growth. For example, your system may be generating 500 documents every second. That will generate diff --git a/docs/reference/rollup/rollup-agg-limitations.asciidoc b/docs/reference/rollup/rollup-agg-limitations.asciidoc index 72374dfeb1ba3..f6e557a27184e 100644 --- a/docs/reference/rollup/rollup-agg-limitations.asciidoc +++ b/docs/reference/rollup/rollup-agg-limitations.asciidoc @@ -2,10 +2,7 @@ [[rollup-agg-limitations]] === {rollup-cap} aggregation limitations -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] There are some limitations to how fields can be rolled up / aggregated. This page highlights the major limitations so that you are aware of them. diff --git a/docs/reference/rollup/rollup-apis.asciidoc b/docs/reference/rollup/rollup-apis.asciidoc index 8707e6a0c3217..94dab153ed9c7 100644 --- a/docs/reference/rollup/rollup-apis.asciidoc +++ b/docs/reference/rollup/rollup-apis.asciidoc @@ -2,6 +2,8 @@ [[rollup-apis]] == Rollup APIs +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] + [discrete] [[rollup-jobs-endpoint]] === Jobs diff --git a/docs/reference/rollup/rollup-getting-started.asciidoc b/docs/reference/rollup/rollup-getting-started.asciidoc index 0772a2778d9c2..7e00af05526ee 100644 --- a/docs/reference/rollup/rollup-getting-started.asciidoc +++ b/docs/reference/rollup/rollup-getting-started.asciidoc @@ -5,10 +5,7 @@ Getting started ++++ -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] To use the Rollup feature, you need to create one or more "Rollup Jobs". These jobs run continuously in the background and rollup the index or indices that you specify, placing the rolled documents in a secondary index (also of your choosing). diff --git a/docs/reference/rollup/rollup-search-limitations.asciidoc b/docs/reference/rollup/rollup-search-limitations.asciidoc index c64923811e2a1..ac44bd69722c0 100644 --- a/docs/reference/rollup/rollup-search-limitations.asciidoc +++ b/docs/reference/rollup/rollup-search-limitations.asciidoc @@ -2,10 +2,7 @@ [[rollup-search-limitations]] === {rollup-cap} search limitations -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] While we feel the Rollup function is extremely flexible, the nature of summarizing data means there will be some limitations. Once live data is thrown away, you will always lose some flexibility. 
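Since each of these deprecation notices points to downsampling as the replacement, a brief sketch of that migration path may be useful here. The downsample API condenses a time series data stream (TSDS) backing index into a coarser, pre-aggregated index; the index names below are hypothetical, and the source index must first be made read-only:

[source,console]
----
POST /my-tsds-backing-index/_downsample/my-tsds-backing-index-downsampled
{
  "fixed_interval": "1h"
}
----

Unlike a rollup job, this runs once per index rather than continuously in the background; in practice it is typically driven by ILM's downsample action.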
diff --git a/docs/reference/rollup/understanding-groups.asciidoc b/docs/reference/rollup/understanding-groups.asciidoc index 21d061c2ae7ef..24afea110bd95 100644 --- a/docs/reference/rollup/understanding-groups.asciidoc +++ b/docs/reference/rollup/understanding-groups.asciidoc @@ -2,10 +2,7 @@ [[rollup-understanding-groups]] === Understanding groups -experimental[] - -NOTE: For version 8.5 and above we recommend <> over -rollups as a way to reduce your storage costs for time series data. +deprecated::[8.11.0,"Rollups will be removed in a future version. Use <> instead."] To preserve flexibility, Rollup Jobs are defined based on how future queries may need to use the data. Traditionally, systems force the admin to make decisions about what metrics to rollup and on what interval. E.g. The average of `cpu_time` on an hourly basis. This diff --git a/docs/reference/search-application/apis/get-search-application.asciidoc b/docs/reference/search-application/apis/get-search-application.asciidoc index 53e6df0262db8..f0c107011eb40 100644 --- a/docs/reference/search-application/apis/get-search-application.asciidoc +++ b/docs/reference/search-application/apis/get-search-application.asciidoc @@ -120,10 +120,3 @@ A sample response: } ---- // TESTRESPONSE[s/"updated_at_millis": 1682105622204/"updated_at_millis": $body.$_path/] - -[NOTE] -==== -The indices associated with a search application are not returned with the GET response. -To view the indices, use the <> API. -The alias name will match the search application name, for example `my-app` in the example above. -==== diff --git a/docs/reference/search-application/apis/put-search-application.asciidoc b/docs/reference/search-application/apis/put-search-application.asciidoc index 9cd7ee37aaa02..eb559acc8cdc7 100644 --- a/docs/reference/search-application/apis/put-search-application.asciidoc +++ b/docs/reference/search-application/apis/put-search-application.asciidoc @@ -59,6 +59,7 @@ The <> associated with this search application. (Required, object) The associated mustache template. +[[put-search-application-dictionary-param]] `dictionary`:: (Optional, object) The dictionary used to validate the parameters used with the <> API. The dictionary must be a valid JSON schema. diff --git a/docs/reference/search-application/apis/search-application-render-query.asciidoc b/docs/reference/search-application/apis/search-application-render-query.asciidoc index 326de9e38f420..687176b4fb070 100644 --- a/docs/reference/search-application/apis/search-application-render-query.asciidoc +++ b/docs/reference/search-application/apis/search-application-render-query.asciidoc @@ -8,9 +8,11 @@ preview::[] Render Search Application Query ++++ -Given specified query parameters, creates an Elasticsearch query to run. Any unspecified template parameters will be -assigned their default values if applicable. Returns the specific Elasticsearch query that would be generated and -run by calling <>. +Given specified query parameters, generates an {es} query using the search template associated with the search +application or a default template if none is specified. +Unspecified template parameters will be assigned their default values (if applicable). +Returns the specific {es} query that would be generated and executed by calling +<>. [[search-application-render-query-request]] ==== {api-request-title} @@ -27,19 +29,35 @@ Requires read privileges on the backing alias of the search application. 
`params`:: (Optional, map of strings to objects) -Query parameters specific to this request, which will override any defaults specified in the template. +Query parameters used to generate the {es} query from the search template associated with the search application. +If a parameter used in the search template is not specified in `params`, the parameter's default value will be used. + +[NOTE] +==== +The search application can be configured to validate search template parameters. +See the `dictionary` parameter in the <> API for more +information. +==== [[search-application-render-query-response-codes]] ==== {api-response-codes-title} +`400`:: +Invalid parameter passed to search template. +Examples include: + +- Missing required parameter +- Invalid parameter data type +- Invalid parameter value + `404`:: Search Application `` does not exist. [[search-application-render-query-example]] ==== {api-examples-title} -The following example renders a query for a search application called `my-app`. In this case, the `from` and `size` -parameters are not specified, so default values are pulled from the search application template. +The following example generates a query for a search application called `my-app` that uses the search template from +the <>: //// [source,console] @@ -99,14 +117,8 @@ POST _application/search_application/my-app/_render_query "params": { "query_string": "my first query", "text_fields": [ - { - "name": "title", - "boost": 10 - }, - { - "name": "text", - "boost": 1 - } + {"name": "title", "boost": 5}, + {"name": "description", "boost": 1} ] } } @@ -117,19 +129,21 @@ A sample response: [source,console-result] ---- { - "from": 0, - "size": 10, - "query": { - "multi_match": { - "query": "my first query", - "fields": [ - "text^1.0", - "title^10.0" - ] - } - }, - "explain": false + "from": 0, + "size": 10, + "query": { + "multi_match": { + "query": "my first query", + "fields": [ + "description^1.0", + "title^5.0" + ] + } + }, + "explain": false } ---- // TEST[continued] +In this case, the `from`, `size`, and `explain` parameters are not specified in the request, so the default values +specified in the search template are used. diff --git a/docs/reference/search-application/apis/search-application-search.asciidoc b/docs/reference/search-application/apis/search-application-search.asciidoc index 02755f1896496..b166c8aae04d0 100644 --- a/docs/reference/search-application/apis/search-application-search.asciidoc +++ b/docs/reference/search-application/apis/search-application-search.asciidoc @@ -8,8 +8,9 @@ beta::[] Search Application Search ++++ -Given specified query parameters, creates an Elasticsearch query to run. Any unspecified template parameters will be -assigned their default values if applicable. +Given specified query parameters, generates and executes an {es} query using the search template associated +with the search application or a default template if none is specified. +Unspecified template parameters will be assigned their default values (if applicable). [[search-application-search-request]] ==== {api-request-title} @@ -28,24 +29,47 @@ Requires read privileges on the backing alias of the search application. `params`:: (Optional, map of strings to objects) -Query parameters specific to this request, which will override any defaults specified in the template. +Query parameters used to generate the {es} query from the search template associated with the search application. 
+If a parameter used in the search template is not specified in `params`, the parameter's default value will be used. + +[NOTE] +==== +The search application can be configured to validate search template parameters. +See the `dictionary` parameter in the <> API for more +information. +==== [[search-application-search-response-codes]] ==== {api-response-codes-title} +`400`:: +Invalid parameter passed to search template. +Examples include: + +- Missing required parameter +- Invalid parameter data type +- Invalid parameter value + `404`:: Search Application `` does not exist. [[search-application-search-example]] ==== {api-examples-title} -The following example performs a search against a search application called `my-app`: +The following example executes a search against a search application called `my-app` that uses the search template from +the <>: //// [source,console] ---- PUT /index1 +PUT /index1/_doc/1?refresh=true +{ + "title": "Sample document", + "description": "A sample document that matches my first query" +} + PUT _application/search_application/my-app { "indices": ["index1"], @@ -57,7 +81,7 @@ PUT _application/search_application/my-app "query": { "multi_match": { "query": "{{query_string}}", - "fields": [{{#text_fields}}"{{name}}^{{boost}}"{{^last}},{{/last}}{{/text_fields}}] + "fields": [{{#text_fields}}"{{name}}^{{boost}}",{{/text_fields}}] } }, "explain": "{{explain}}", @@ -68,8 +92,8 @@ PUT _application/search_application/my-app "params": { "query_string": "*", "text_fields": [ - {"name": "title", "boost": 10, "last": false}, - {"name": "description", "boost": 5, "last": true} + {"name": "title", "boost": 10}, + {"name": "description", "boost": 5} ], "explain": false, "from": 0, @@ -97,23 +121,62 @@ DELETE /index1 POST _application/search_application/my-app/_search { "params": { - "value": "my first query", - "size": 10, - "from": 0, + "query_string": "my first query", "text_fields": [ - { - "name": "title", - "boost": 10 - }, - { - "name": "text", - "boost": 1 - } + {"name": "title", "boost": 5}, + {"name": "description", "boost": 1} ] } } ---- -The expected results are search results from the query that was run. +The generated {es} query would look like: +[source,console-result] +---- +{ + "from": 0, + "size": 10, + "query": { + "multi_match": { + "query": "my first query", + "fields": [ + "description^1.0", + "title^5.0" + ] + } + }, + "explain": false +} +---- +// TESTRESPONSE[skip:result of request not run in this document] + +In this case, the `from`, `size`, and `explain` parameters are not specified in the request, so the default values +specified in the search template are used. +The expected response is the search results from the {es} query that was generated & executed. +The response format is the same as that used by the <>: + +[source,console-result] +---- +{ + "took": 5, + "timed_out": false, + "_shards": { + "total": 1, + "successful": 1, + "skipped": 0, + "failed": 0 + }, + "hits": { + "total": { + "value": 1, + "relation": "eq" + }, + "max_score": 0.8630463, + "hits": ... 
+ } +} +---- +// TESTRESPONSE[s/"took": 5/"took": $body.$_path/] +// TESTRESPONSE[s/"hits": \.\.\./"hits": $body.$_path/] diff --git a/docs/reference/search/search-your-data/search-template.asciidoc b/docs/reference/search/search-your-data/search-template.asciidoc index 9fb6ee85ffcca..7a7f09f4a37a7 100644 --- a/docs/reference/search/search-your-data/search-template.asciidoc +++ b/docs/reference/search/search-your-data/search-template.asciidoc @@ -795,7 +795,7 @@ PUT _scripts/my-search-template "lang": "mustache", "source": { "query":{ - "multi-match":{ + "multi_match":{ "query": "{{query_string}}", "fields": """[{{#text_fields}}{{user_name}},{{/text_fields}}]""" } @@ -834,7 +834,7 @@ When rendered, template outputs: { "template_output": { "query": { - "multi-match": { + "multi_match": { "query": "My string", "fields": "[John,kimchy,]" } @@ -855,7 +855,7 @@ PUT _scripts/my-search-template "lang": "mustache", "source": { "query":{ - "multi-match":{ + "multi_match":{ "query": "{{query_string}}", "fields": """[{{#text_fields}}{{user_name}}{{^last}},{{/last}}{{/text_fields}}]""" } @@ -895,7 +895,7 @@ When rendered the template outputs: { "template_output": { "query": { - "multi-match": { + "multi_match": { "query": "My string", "fields": "[John,kimchy]" } diff --git a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc index 6860ad56fefaf..8c3f4c793e5e0 100644 --- a/docs/reference/settings/data-stream-lifecycle-settings.asciidoc +++ b/docs/reference/settings/data-stream-lifecycle-settings.asciidoc @@ -30,6 +30,27 @@ this means that your data stream will rollover if any of the following condition * or the index reaches a certain age which depends on the retention time of your data stream, * **and** has at least one document. +[[data-streams-lifecycle-target-merge-factor]] +`data_streams.lifecycle.target.merge.policy.merge_factor`:: +(<>, integer) +Data stream lifecycle implements <> by +updating the Lucene merge policy factor for the target backing index. The merge factor +is both the number of segments that should be merged together and the maximum number +of segments that we expect to find on a given tier. +This setting controls the value that <> +configures on the target index. It defaults to `16`. +The value will be visible under the `index.merge.policy.merge_factor` index setting +on the target index. + +[[data-streams-lifecycle-target-floor-segment]] +`data_streams.lifecycle.target.merge.policy.floor_segment`:: +(<>) +Data stream lifecycle implements <> by +updating the Lucene merge policy floor segment for the target backing index. This floor +segment size is a way to prevent indices from having a long tail of very small segments. +This setting controls the value that <> +configures on the target index. It defaults to `100MB`. + ==== Index level settings The following index-level settings are typically configured on the backing indices of a data stream. diff --git a/docs/reference/setup/sysconfig/configuring.asciidoc b/docs/reference/setup/sysconfig/configuring.asciidoc index 4ee7017510152..912a25f22619a 100644 --- a/docs/reference/setup/sysconfig/configuring.asciidoc +++ b/docs/reference/setup/sysconfig/configuring.asciidoc @@ -20,7 +20,7 @@ require that system limits are specified in a On Linux systems, `ulimit` can be used to change resource limits on a temporary basis. Limits usually need to be set as `root` before switching to the user that will run Elasticsearch.
For example, to set the number of -open file handles (`ulimit -n`) to 65,536, you can do the following: +open file handles (`ulimit -n`) to 65,535, you can do the following: [source,sh] -------------------------------- diff --git a/docs/reference/setup/sysconfig/file-descriptors.asciidoc b/docs/reference/setup/sysconfig/file-descriptors.asciidoc index 905a29d846c38..8eed9ef98305b 100644 --- a/docs/reference/setup/sysconfig/file-descriptors.asciidoc +++ b/docs/reference/setup/sysconfig/file-descriptors.asciidoc @@ -10,7 +10,7 @@ limited only by available resources. Elasticsearch uses a lot of file descriptors or file handles. Running out of file descriptors can be disastrous and will most probably lead to data loss. Make sure to increase the limit on the number of open files descriptors for -the user running Elasticsearch to 65,536 or higher. +the user running Elasticsearch to 65,535 or higher. For the `.zip` and `.tar.gz` packages, set <> as root before starting Elasticsearch, or set `nofile` to `65535` in diff --git a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc index 190eb43d996f8..4f80ffcf3a8b9 100644 --- a/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/repo-analysis-api.asciidoc @@ -54,12 +54,15 @@ The Repository analysis API performs a collection of read and write operations on your repository which are designed to detect incorrect behaviour and to measure the performance characteristics of your storage system. -The default values for the parameters to this API are deliberately low to -reduce the impact of running an analysis inadvertently. A realistic experiment -should set `blob_count` to at least `2000`, `max_blob_size` to at least `2gb`, -and `max_total_data_size` to at least `1tb`, and will almost certainly need to -increase the `timeout` to allow time for the process to complete successfully. -You should run the analysis on a multi-node cluster of a similar size to your +The default values for the parameters to this API are deliberately low to reduce +the impact of running an analysis inadvertently and to provide a sensible +starting point for your investigations. Run your first analysis with the default +parameter values to check for simple problems. If successful, run a sequence of +increasingly large analyses until you encounter a failure or you reach a +`blob_count` of at least `2000`, a `max_blob_size` of at least `2gb`, and a +`max_total_data_size` of at least `1tb`. Always specify a generous timeout, +possibly `1h` or longer, to allow time for each analysis to run to completion. +Perform the analyses using a multi-node cluster of a similar size to your production cluster so that it can detect any problems that only arise when the repository is accessed by many nodes at once. diff --git a/docs/reference/snapshot-restore/repository-s3.asciidoc b/docs/reference/snapshot-restore/repository-s3.asciidoc index 49f0bc5fb598c..70993f5b515b3 100644 --- a/docs/reference/snapshot-restore/repository-s3.asciidoc +++ b/docs/reference/snapshot-restore/repository-s3.asciidoc @@ -257,9 +257,16 @@ PUT /_cluster/settings ---- // TEST[skip:we don't really want to change this logger] -The supplier of your storage system will be able to analyse these logs to determine the problem. See -the https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-logging.html[AWS Java SDK] -documentation for further information. 
+Collect the Elasticsearch logs covering the time period of the failed analysis +from all nodes in your cluster and share them with the supplier of your storage +system along with the analysis response so they can use them to determine the +problem. See the +https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/java-dg-logging.html[AWS Java SDK] +documentation for further information, including details about other loggers +that can be used to obtain even more verbose logs. When you have finished +collecting the logs needed by your supplier, set the logger settings back to +`null` to return to the default logging configuration. See <> +and <> for more information. [[repository-s3-repository]] ==== Repository settings diff --git a/gradle/build.versions.toml b/gradle/build.versions.toml index 4472fd635f905..94ed94df43818 100644 --- a/gradle/build.versions.toml +++ b/gradle/build.versions.toml @@ -16,7 +16,7 @@ checkstyle = "com.puppycrawl.tools:checkstyle:10.3" commons-codec = "commons-codec:commons-codec:1.11" commmons-io = "commons-io:commons-io:2.2" docker-compose = "com.avast.gradle:gradle-docker-compose-plugin:0.17.5" -forbiddenApis = "de.thetaphi:forbiddenapis:3.5.1" +forbiddenApis = "de.thetaphi:forbiddenapis:3.6" hamcrest = "org.hamcrest:hamcrest:2.1" httpcore = "org.apache.httpcomponents:httpcore:4.4.12" httpclient = "org.apache.httpcomponents:httpclient:4.5.10" diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3200cd14ae3a8..e340efb0c6987 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -1186,9 +1186,9 @@ (dependency checksum entries updated; XML element content not preserved) @@ -1690,25 +1690,25 @@ (dependency checksum entries updated; XML element content not preserved) diff --git a/libs/x-content/build.gradle b/libs/x-content/build.gradle index c51b91e2711b8..5c9dd49c007b8 100644 --- a/libs/x-content/build.gradle +++ b/libs/x-content/build.gradle @@ -8,6 +8,7 @@ import org.elasticsearch.gradle.transform.UnzipTransform +import org.elasticsearch.gradle.internal.GenerateProviderManifest import org.gradle.api.internal.artifacts.ArtifactAttributes import java.util.stream.Collectors @@ -66,20 +67,14 @@ tasks.named("dependencyLicenses").configure { mapping from: /jackson-.*/, to: 'jackson' } -File generatedResourcesDir = new File(buildDir, 'generated-resources') -def generateProviderManifest = tasks.register("generateProviderManifest") { - File manifestFile = new File(generatedResourcesDir, "LISTING.TXT") - inputs.files('jars', configurations.providerImpl).withPathSensitivity(PathSensitivity.RELATIVE) - outputs.file(manifestFile) - doLast { - manifestFile.parentFile.mkdirs() - manifestFile.setText(configurations.providerImpl.files.stream() - .map(f -> f.name).sorted().collect(Collectors.joining('\n')), 'UTF-8') - } +Directory generatedResourcesDir = layout.buildDirectory.dir('generated-resources').get() +def generateProviderManifest = tasks.register("generateProviderManifest", GenerateProviderManifest.class) { + manifestFile = generatedResourcesDir.file("LISTING.TXT") + getProviderImplClasspath().from(configurations.providerImpl) } def generateProviderImpl = tasks.register("generateProviderImpl", Sync) { - destinationDir = new File(generatedResourcesDir, "impl") + destinationDir = generatedResourcesDir.dir("impl").getAsFile() into("IMPL-JARS/x-content") { from(configurations.providerImpl) from(generateProviderManifest) diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/AdjacencyMatrixIT.java
b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/AdjacencyMatrixIT.java index ae800b23b12e1..37e782cd7c611 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/AdjacencyMatrixIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/AdjacencyMatrixIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.aggregations.AggregationIntegTestCase; import org.elasticsearch.aggregations.bucket.adjacency.AdjacencyMatrix; import org.elasticsearch.aggregations.bucket.adjacency.AdjacencyMatrix.Bucket; @@ -35,7 +34,9 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -98,183 +99,180 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) - .get(); - - assertSearchResponse(response); - - AdjacencyMatrix matrix = response.getAggregations().get("tags"); - assertThat(matrix, notNullValue()); - assertThat(matrix.getName(), equalTo("tags")); - - int expected = numMultiTagDocs > 0 ? 3 : 2; - assertThat(matrix.getBuckets().size(), equalTo(expected)); - - AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey("tag1"); - assertThat(bucket, Matchers.notNullValue()); - assertThat(bucket.getDocCount(), equalTo((long) numTag1Docs)); - - bucket = matrix.getBucketByKey("tag2"); - assertThat(bucket, Matchers.notNullValue()); - assertThat(bucket.getDocCount(), equalTo((long) numTag2Docs)); - - bucket = matrix.getBucketByKey("tag1&tag2"); - if (numMultiTagDocs == 0) { - assertThat(bucket, Matchers.nullValue()); - } else { - assertThat(bucket, Matchers.notNullValue()); - assertThat(bucket.getDocCount(), equalTo((long) numMultiTagDocs)); - } - + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( + adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))) + ), + response -> { + AdjacencyMatrix matrix = response.getAggregations().get("tags"); + assertThat(matrix, notNullValue()); + assertThat(matrix.getName(), equalTo("tags")); + + int expected = numMultiTagDocs > 0 ? 
3 : 2; + assertThat(matrix.getBuckets().size(), equalTo(expected)); + + AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey("tag1"); + assertThat(bucket, Matchers.notNullValue()); + assertThat(bucket.getDocCount(), equalTo((long) numTag1Docs)); + + bucket = matrix.getBucketByKey("tag2"); + assertThat(bucket, Matchers.notNullValue()); + assertThat(bucket.getDocCount(), equalTo((long) numTag2Docs)); + + bucket = matrix.getBucketByKey("tag1&tag2"); + if (numMultiTagDocs == 0) { + assertThat(bucket, Matchers.nullValue()); + } else { + assertThat(bucket, Matchers.notNullValue()); + assertThat(bucket.getDocCount(), equalTo((long) numMultiTagDocs)); + } + } + ); } public void testCustomSeparator() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", "\t", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")))) - .get(); - - assertSearchResponse(response); - - AdjacencyMatrix matrix = response.getAggregations().get("tags"); - assertThat(matrix, notNullValue()); - - AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey("tag1\ttag2"); - if (numMultiTagDocs == 0) { - assertThat(bucket, Matchers.nullValue()); - } else { - assertThat(bucket, Matchers.notNullValue()); - assertThat(bucket.getDocCount(), equalTo((long) numMultiTagDocs)); - } - + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( + adjacencyMatrix("tags", "\t", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))) + ), + response -> { + AdjacencyMatrix matrix = response.getAggregations().get("tags"); + assertThat(matrix, notNullValue()); + + AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey("tag1\ttag2"); + if (numMultiTagDocs == 0) { + assertThat(bucket, Matchers.nullValue()); + } else { + assertThat(bucket, Matchers.notNullValue()); + assertThat(bucket.getDocCount(), equalTo((long) numMultiTagDocs)); + } + } + ); } // See NullPointer issue when filters are empty: // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation(adjacencyMatrix("tags", newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1")))) - .get(); - - assertSearchResponse(response); - - AdjacencyMatrix filters = response.getAggregations().get("tags"); - assertThat(filters, notNullValue()); - AdjacencyMatrix.Bucket allBucket = filters.getBucketByKey("all"); - assertThat(allBucket.getDocCount(), equalTo((long) numDocs)); - - AdjacencyMatrix.Bucket bucket = filters.getBucketByKey("tag1"); - assertThat(bucket, Matchers.notNullValue()); - assertThat(bucket.getDocCount(), equalTo((long) numTag1Docs)); + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation(adjacencyMatrix("tags", newMap("all", emptyFilter).add("tag1", termQuery("tag", "tag1")))), + response -> { + AdjacencyMatrix filters = response.getAggregations().get("tags"); + assertThat(filters, notNullValue()); + AdjacencyMatrix.Bucket allBucket = filters.getBucketByKey("all"); + assertThat(allBucket.getDocCount(), equalTo((long) numDocs)); + + AdjacencyMatrix.Bucket bucket = filters.getBucketByKey("tag1"); + assertThat(bucket, Matchers.notNullValue()); + assertThat(bucket.getDocCount(), equalTo((long) numTag1Docs)); + } + ); } public void testWithSubAggregation() throws Exception { BoolQueryBuilder boolQ = new BoolQueryBuilder(); boolQ.must(termQuery("tag", "tag1")); 
boolQ.must(termQuery("tag", "tag2")); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2")).add("both", boolQ)) .subAggregation(avg("avg_value").field("value")) - ) - .get(); - - assertSearchResponse(response); - - AdjacencyMatrix matrix = response.getAggregations().get("tags"); - assertThat(matrix, notNullValue()); - assertThat(matrix.getName(), equalTo("tags")); - - int expectedBuckets = 0; - if (numTag1Docs > 0) { - expectedBuckets++; - } - if (numTag2Docs > 0) { - expectedBuckets++; - } - if (numMultiTagDocs > 0) { - // both, both&tag1, both&tag2, tag1&tag2 - expectedBuckets += 4; - } - - assertThat(matrix.getBuckets().size(), equalTo(expectedBuckets)); - assertThat(((InternalAggregation) matrix).getProperty("_bucket_count"), equalTo(expectedBuckets)); - - Object[] propertiesKeys = (Object[]) ((InternalAggregation) matrix).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) matrix).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation) matrix).getProperty("avg_value.value"); - - assertEquals(expectedBuckets, propertiesKeys.length); - assertEquals(propertiesKeys.length, propertiesDocCounts.length); - assertEquals(propertiesKeys.length, propertiesCounts.length); - - for (int i = 0; i < propertiesCounts.length; i++) { - AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey(propertiesKeys[i].toString()); - assertThat(bucket, Matchers.notNullValue()); - Avg avgValue = bucket.getAggregations().get("avg_value"); - assertThat(avgValue, notNullValue()); - assertThat((long) propertiesDocCounts[i], equalTo(bucket.getDocCount())); - assertThat((double) propertiesCounts[i], equalTo(avgValue.getValue())); - } - - AdjacencyMatrix.Bucket tag1Bucket = matrix.getBucketByKey("tag1"); - assertThat(tag1Bucket, Matchers.notNullValue()); - assertThat(tag1Bucket.getDocCount(), equalTo((long) numTag1Docs)); - long sum = 0; - for (int i = 0; i < numSingleTag1Docs; i++) { - sum += i + 1; - } - for (int i = numSingleTag1Docs + numSingleTag2Docs; i < numDocs; i++) { - sum += i + 1; - } - assertThat(tag1Bucket.getAggregations().asList().isEmpty(), is(false)); - Avg avgBucket1Value = tag1Bucket.getAggregations().get("avg_value"); - assertThat(avgBucket1Value, notNullValue()); - assertThat(avgBucket1Value.getName(), equalTo("avg_value")); - assertThat(avgBucket1Value.getValue(), equalTo((double) sum / numTag1Docs)); - - Bucket tag2Bucket = matrix.getBucketByKey("tag2"); - assertThat(tag2Bucket, Matchers.notNullValue()); - assertThat(tag2Bucket.getDocCount(), equalTo((long) numTag2Docs)); - sum = 0; - for (int i = numSingleTag1Docs; i < numDocs; i++) { - sum += i + 1; - } - assertThat(tag2Bucket.getAggregations().asList().isEmpty(), is(false)); - Avg avgBucket2Value = tag2Bucket.getAggregations().get("avg_value"); - assertThat(avgBucket2Value, notNullValue()); - assertThat(avgBucket2Value.getName(), equalTo("avg_value")); - assertThat(avgBucket2Value.getValue(), equalTo((double) sum / numTag2Docs)); - - // Check intersection buckets are computed correctly by comparing with - // ANDed query bucket results - Bucket bucketBothQ = matrix.getBucketByKey("both"); - if (numMultiTagDocs == 0) { - // Empty intersections are not returned. 
- assertThat(bucketBothQ, Matchers.nullValue()); - Bucket bucketIntersectQ = matrix.getBucketByKey("tag1&tag2"); - assertThat(bucketIntersectQ, Matchers.nullValue()); - Bucket tag1Both = matrix.getBucketByKey("both&tag1"); - assertThat(tag1Both, Matchers.nullValue()); - } else { - assertThat(bucketBothQ, Matchers.notNullValue()); - assertThat(bucketBothQ.getDocCount(), equalTo((long) numMultiTagDocs)); - Avg avgValueBothQ = bucketBothQ.getAggregations().get("avg_value"); - - Bucket bucketIntersectQ = matrix.getBucketByKey("tag1&tag2"); - assertThat(bucketIntersectQ, Matchers.notNullValue()); - assertThat(bucketIntersectQ.getDocCount(), equalTo((long) numMultiTagDocs)); - Avg avgValueIntersectQ = bucketBothQ.getAggregations().get("avg_value"); - assertThat(avgValueIntersectQ.getValue(), equalTo(avgValueBothQ.getValue())); - - Bucket tag1Both = matrix.getBucketByKey("both&tag1"); - assertThat(tag1Both, Matchers.notNullValue()); - assertThat(tag1Both.getDocCount(), equalTo((long) numMultiTagDocs)); - Avg avgValueTag1BothIntersectQ = tag1Both.getAggregations().get("avg_value"); - assertThat(avgValueTag1BothIntersectQ.getValue(), equalTo(avgValueBothQ.getValue())); - } - + ), + response -> { + AdjacencyMatrix matrix = response.getAggregations().get("tags"); + assertThat(matrix, notNullValue()); + assertThat(matrix.getName(), equalTo("tags")); + + int expectedBuckets = 0; + if (numTag1Docs > 0) { + expectedBuckets++; + } + if (numTag2Docs > 0) { + expectedBuckets++; + } + if (numMultiTagDocs > 0) { + // both, both&tag1, both&tag2, tag1&tag2 + expectedBuckets += 4; + } + + assertThat(matrix.getBuckets().size(), equalTo(expectedBuckets)); + assertThat(((InternalAggregation) matrix).getProperty("_bucket_count"), equalTo(expectedBuckets)); + + Object[] propertiesKeys = (Object[]) ((InternalAggregation) matrix).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) matrix).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) matrix).getProperty("avg_value.value"); + + assertEquals(expectedBuckets, propertiesKeys.length); + assertEquals(propertiesKeys.length, propertiesDocCounts.length); + assertEquals(propertiesKeys.length, propertiesCounts.length); + + for (int i = 0; i < propertiesCounts.length; i++) { + AdjacencyMatrix.Bucket bucket = matrix.getBucketByKey(propertiesKeys[i].toString()); + assertThat(bucket, Matchers.notNullValue()); + Avg avgValue = bucket.getAggregations().get("avg_value"); + assertThat(avgValue, notNullValue()); + assertThat((long) propertiesDocCounts[i], equalTo(bucket.getDocCount())); + assertThat((double) propertiesCounts[i], equalTo(avgValue.getValue())); + } + + AdjacencyMatrix.Bucket tag1Bucket = matrix.getBucketByKey("tag1"); + assertThat(tag1Bucket, Matchers.notNullValue()); + assertThat(tag1Bucket.getDocCount(), equalTo((long) numTag1Docs)); + long sum = 0; + for (int i = 0; i < numSingleTag1Docs; i++) { + sum += i + 1; + } + for (int i = numSingleTag1Docs + numSingleTag2Docs; i < numDocs; i++) { + sum += i + 1; + } + assertThat(tag1Bucket.getAggregations().asList().isEmpty(), is(false)); + Avg avgBucket1Value = tag1Bucket.getAggregations().get("avg_value"); + assertThat(avgBucket1Value, notNullValue()); + assertThat(avgBucket1Value.getName(), equalTo("avg_value")); + assertThat(avgBucket1Value.getValue(), equalTo((double) sum / numTag1Docs)); + + Bucket tag2Bucket = matrix.getBucketByKey("tag2"); + assertThat(tag2Bucket, Matchers.notNullValue()); + assertThat(tag2Bucket.getDocCount(), equalTo((long) 
numTag2Docs)); + sum = 0; + for (int i = numSingleTag1Docs; i < numDocs; i++) { + sum += i + 1; + } + assertThat(tag2Bucket.getAggregations().asList().isEmpty(), is(false)); + Avg avgBucket2Value = tag2Bucket.getAggregations().get("avg_value"); + assertThat(avgBucket2Value, notNullValue()); + assertThat(avgBucket2Value.getName(), equalTo("avg_value")); + assertThat(avgBucket2Value.getValue(), equalTo((double) sum / numTag2Docs)); + + // Check intersection buckets are computed correctly by comparing with + // ANDed query bucket results + Bucket bucketBothQ = matrix.getBucketByKey("both"); + if (numMultiTagDocs == 0) { + // Empty intersections are not returned. + assertThat(bucketBothQ, Matchers.nullValue()); + Bucket bucketIntersectQ = matrix.getBucketByKey("tag1&tag2"); + assertThat(bucketIntersectQ, Matchers.nullValue()); + Bucket tag1Both = matrix.getBucketByKey("both&tag1"); + assertThat(tag1Both, Matchers.nullValue()); + } else { + assertThat(bucketBothQ, Matchers.notNullValue()); + assertThat(bucketBothQ.getDocCount(), equalTo((long) numMultiTagDocs)); + Avg avgValueBothQ = bucketBothQ.getAggregations().get("avg_value"); + + Bucket bucketIntersectQ = matrix.getBucketByKey("tag1&tag2"); + assertThat(bucketIntersectQ, Matchers.notNullValue()); + assertThat(bucketIntersectQ.getDocCount(), equalTo((long) numMultiTagDocs)); + Avg avgValueIntersectQ = bucketIntersectQ.getAggregations().get("avg_value"); + assertThat(avgValueIntersectQ.getValue(), equalTo(avgValueBothQ.getValue())); + + Bucket tag1Both = matrix.getBucketByKey("both&tag1"); + assertThat(tag1Both, Matchers.notNullValue()); + assertThat(tag1Both.getDocCount(), equalTo((long) numMultiTagDocs)); + Avg avgValueTag1BothIntersectQ = tag1Both.getAggregations().get("avg_value"); + assertThat(avgValueTag1BothIntersectQ.getValue(), equalTo(avgValueBothQ.getValue())); + } + } + ); } public void testTooLargeMatrix() { @@ -291,7 +289,7 @@ public void testTooLargeMatrix() { } try { - client().prepareSearch("idx").addAggregation(adjacencyMatrix("tags", "\t", filtersMap)).get(); + prepareSearch("idx").addAggregation(adjacencyMatrix("tags", "\t", filtersMap)).get(); fail("SearchPhaseExecutionException should have been thrown"); } catch (SearchPhaseExecutionException ex) { assertThat(ex.getCause().getMessage(), containsString("Number of filters is too large")); @@ -303,37 +301,34 @@ } public void testAsSubAggregation() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( histogram("histo").field("value").interval(2L).subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery()))) - ) - .get(); - - assertSearchResponse(response); - - Histogram histo = response.getAggregations().get("histo"); - assertThat(histo, notNullValue()); - assertThat(histo.getBuckets().size(), greaterThanOrEqualTo(1)); - - for (Histogram.Bucket bucket : histo.getBuckets()) { - AdjacencyMatrix matrix = bucket.getAggregations().get("matrix"); - assertThat(matrix, notNullValue()); - assertThat(matrix.getBuckets().size(), equalTo(1)); - AdjacencyMatrix.Bucket filterBucket = matrix.getBuckets().get(0); - assertEquals(bucket.getDocCount(), filterBucket.getDocCount()); - } + ), + response -> { + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getBuckets().size(), greaterThanOrEqualTo(1)); + + for (Histogram.Bucket bucket : histo.getBuckets()) { + AdjacencyMatrix matrix =
bucket.getAggregations().get("matrix"); + assertThat(matrix, notNullValue()); + assertThat(matrix.getBuckets().size(), equalTo(1)); + AdjacencyMatrix.Bucket filterBucket = matrix.getBuckets().get(0); + assertEquals(bucket.getDocCount(), filterBucket.getDocCount()); + } + } + ); } public void testWithContextBasedSubAggregation() throws Exception { try { - client().prepareSearch("idx") - .addAggregation( - adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))).subAggregation( - avg("avg_value") - ) + prepareSearch("idx").addAggregation( + adjacencyMatrix("tags", newMap("tag1", termQuery("tag", "tag1")).add("tag2", termQuery("tag", "tag2"))).subAggregation( + avg("avg_value") ) - .get(); + ).get(); fail( "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" @@ -346,26 +341,27 @@ public void testWithContextBasedSubAggregation() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) - .addAggregation( - histogram("histo").field("value") - .interval(1L) - .minDocCount(0) - .subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery()))) - ) - .get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - Histogram histo = searchResponse.getAggregations().get("histo"); - assertThat(histo, Matchers.notNullValue()); - Histogram.Bucket bucket = histo.getBuckets().get(1); - assertThat(bucket, Matchers.notNullValue()); - - AdjacencyMatrix matrix = bucket.getAggregations().get("matrix"); - assertThat(matrix, notNullValue()); - AdjacencyMatrix.Bucket all = matrix.getBucketByKey("all"); - assertThat(all, Matchers.nullValue()); + assertResponse( + prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) + .addAggregation( + histogram("histo").field("value") + .interval(1L) + .minDocCount(0) + .subAggregation(adjacencyMatrix("matrix", newMap("all", matchAllQuery()))) + ), + response -> { + assertHitCount(response, 2L); + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, Matchers.notNullValue()); + Histogram.Bucket bucket = histo.getBuckets().get(1); + assertThat(bucket, Matchers.notNullValue()); + + AdjacencyMatrix matrix = bucket.getAggregations().get("matrix"); + assertThat(matrix, notNullValue()); + AdjacencyMatrix.Bucket all = matrix.getBucketByKey("all"); + assertThat(all, Matchers.nullValue()); + } + ); } // Helper methods for building maps of QueryBuilders diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java index 8363aa310a451..4d64ad1030136 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/SearchCancellationIT.java @@ -97,8 +97,7 @@ public void testCancellationDuringTimeSeriesAggregation() throws Exception { logger.info("Executing search"); TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg"); - ActionFuture searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + ActionFuture searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addAggregation( 
timeSeriesAggregationBuilder.subAggregation( new ScriptedMetricAggregationBuilder("sub_agg").initScript( diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java index 3100db781172a..3f7d52c32e8df 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesAggregationsIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.aggregations.AggregationIntegTestCase; import org.elasticsearch.aggregations.bucket.timeseries.InternalTimeSeries; @@ -58,7 +57,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -127,7 +126,7 @@ public void setupSuiteScopeCluster() throws Exception { .put("time_series.start_time", boundaries[i]) .put("time_series.end_time", boundaries[i + 1]) .build() - ).setMapping(builder).addAlias(new Alias("index")).get() + ).setMapping(builder).addAlias(new Alias("index")) ); } @@ -177,86 +176,92 @@ public void setupSuiteScopeCluster() throws Exception { } public void testStandAloneTimeSeriesAgg() { - SearchResponse response = client().prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")).get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - InternalTimeSeries timeSeries = aggregations.get("by_ts"); - assertThat( - timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), - equalTo(data.keySet()) - ); - for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { - @SuppressWarnings("unchecked") - Map key = (Map) bucket.getKey(); - assertThat((long) data.get(key).size(), equalTo(bucket.getDocCount())); - } - } - - public void testTimeSeriesGroupedByADimension() { - String groupBy = "dim_" + randomIntBetween(0, numberOfDimensions - 1); - SearchResponse response = client().prepareSearch("index") - .setSize(0) - .addAggregation( - terms("by_dim").field(groupBy) - .size(data.size()) - .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) - .subAggregation(timeSeries("by_ts")) - ) - .get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - Terms terms = aggregations.get("by_dim"); - Set> keys = new HashSet<>(); - for (Terms.Bucket term : terms.getBuckets()) { - InternalTimeSeries timeSeries = term.getAggregations().get("by_ts"); + 
assertNoFailuresAndResponse(prepareSearch("index").setSize(0).addAggregation(timeSeries("by_ts")), response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + InternalTimeSeries timeSeries = aggregations.get("by_ts"); + assertThat( + timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), + equalTo(data.keySet()) + ); for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { @SuppressWarnings("unchecked") Map key = (Map) bucket.getKey(); assertThat((long) data.get(key).size(), equalTo(bucket.getDocCount())); - assertTrue("key is not unique", keys.add(key)); - assertThat("time series doesn't contain dimensions we grouped by", key.get(groupBy), equalTo(term.getKeyAsString())); } - } - assertThat(keys, equalTo(data.keySet())); + }); + } + + public void testTimeSeriesGroupedByADimension() { + String groupBy = "dim_" + randomIntBetween(0, numberOfDimensions - 1); + assertNoFailuresAndResponse( + prepareSearch("index").setSize(0) + .addAggregation( + terms("by_dim").field(groupBy) + .size(data.size()) + .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) + .subAggregation(timeSeries("by_ts")) + ), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + Terms terms = aggregations.get("by_dim"); + Set> keys = new HashSet<>(); + for (Terms.Bucket term : terms.getBuckets()) { + InternalTimeSeries timeSeries = term.getAggregations().get("by_ts"); + for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { + @SuppressWarnings("unchecked") + Map key = (Map) bucket.getKey(); + assertThat((long) data.get(key).size(), equalTo(bucket.getDocCount())); + assertTrue("key is not unique", keys.add(key)); + assertThat( + "time series doesn't contain dimensions we grouped by", + key.get(groupBy), + equalTo(term.getKeyAsString()) + ); + } + } + assertThat(keys, equalTo(data.keySet())); + } + ); } public void testTimeSeriesGroupedByDateHistogram() { DateHistogramInterval fixedInterval = DateHistogramInterval.days(randomIntBetween(10, 100)); - SearchResponse response = client().prepareSearch("index") - .setSize(0) - .addAggregation( - dateHistogram("by_time").field("@timestamp") - .fixedInterval(fixedInterval) - .subAggregation(timeSeries("by_ts").subAggregation(stats("timestamp").field("@timestamp"))) - ) - .get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - Histogram histogram = aggregations.get("by_time"); - Map, Long> keys = new HashMap<>(); - for (Histogram.Bucket interval : histogram.getBuckets()) { - long intervalStart = ((ZonedDateTime) interval.getKey()).toEpochSecond() * 1000; - long intervalEnd = intervalStart + fixedInterval.estimateMillis(); - InternalTimeSeries timeSeries = interval.getAggregations().get("by_ts"); - for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { - @SuppressWarnings("unchecked") - Map key = (Map) bucket.getKey(); - keys.compute(key, (k, v) -> (v == null ? 
0 : v) + bucket.getDocCount()); - assertThat(bucket.getDocCount(), lessThanOrEqualTo((long) data.get(key).size())); - Stats stats = bucket.getAggregations().get("timestamp"); - long minTimestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(stats.getMinAsString()); - long maxTimestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(stats.getMaxAsString()); - assertThat(minTimestamp, greaterThanOrEqualTo(intervalStart)); - assertThat(maxTimestamp, lessThan(intervalEnd)); + assertNoFailuresAndResponse( + prepareSearch("index").setSize(0) + .addAggregation( + dateHistogram("by_time").field("@timestamp") + .fixedInterval(fixedInterval) + .subAggregation(timeSeries("by_ts").subAggregation(stats("timestamp").field("@timestamp"))) + ), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + Histogram histogram = aggregations.get("by_time"); + Map, Long> keys = new HashMap<>(); + for (Histogram.Bucket interval : histogram.getBuckets()) { + long intervalStart = ((ZonedDateTime) interval.getKey()).toEpochSecond() * 1000; + long intervalEnd = intervalStart + fixedInterval.estimateMillis(); + InternalTimeSeries timeSeries = interval.getAggregations().get("by_ts"); + for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { + @SuppressWarnings("unchecked") + Map key = (Map) bucket.getKey(); + keys.compute(key, (k, v) -> (v == null ? 0 : v) + bucket.getDocCount()); + assertThat(bucket.getDocCount(), lessThanOrEqualTo((long) data.get(key).size())); + Stats stats = bucket.getAggregations().get("timestamp"); + long minTimestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(stats.getMinAsString()); + long maxTimestamp = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis(stats.getMaxAsString()); + assertThat(minTimestamp, greaterThanOrEqualTo(intervalStart)); + assertThat(maxTimestamp, lessThan(intervalEnd)); + } + } + assertThat(keys.keySet(), equalTo(data.keySet())); + for (Map.Entry, Long> entry : keys.entrySet()) { + assertThat(entry.getValue(), equalTo((long) data.get(entry.getKey()).size())); + } } - } - assertThat(keys.keySet(), equalTo(data.keySet())); - for (Map.Entry, Long> entry : keys.entrySet()) { - assertThat(entry.getValue(), equalTo((long) data.get(entry.getKey()).size())); - } + ); } public void testStandAloneTimeSeriesAggWithDimFilter() { @@ -267,25 +272,24 @@ public void testStandAloneTimeSeriesAggWithDimFilter() { if (include == false) { queryBuilder = QueryBuilders.boolQuery().mustNot(queryBuilder); } - SearchResponse response = client().prepareSearch("index") - .setQuery(queryBuilder) - .setSize(0) - .addAggregation(timeSeries("by_ts")) - .get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - InternalTimeSeries timeSeries = aggregations.get("by_ts"); - Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); - assertThat( - timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), - equalTo(filteredData.keySet()) + assertNoFailuresAndResponse( + prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + InternalTimeSeries timeSeries = aggregations.get("by_ts"); + Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); + assertThat( + 
timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), + equalTo(filteredData.keySet()) + ); + for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { + @SuppressWarnings("unchecked") + Map key = (Map) bucket.getKey(); + assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); + } + } ); - for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { - @SuppressWarnings("unchecked") - Map key = (Map) bucket.getKey(); - assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); - } } public void testStandAloneTimeSeriesAggWithGlobalAggregation() { @@ -297,38 +301,38 @@ public void testStandAloneTimeSeriesAggWithGlobalAggregation() { if (include == false) { queryBuilder = QueryBuilders.boolQuery().mustNot(queryBuilder); } - SearchResponse response = client().prepareSearch("index") - .setQuery(queryBuilder) - .setSize(0) - .addAggregation(timeSeries("by_ts").subAggregation(sum("filter_sum").field("metric_" + metric))) - .addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric))) - .addAggregation(PipelineAggregatorBuilders.sumBucket("total_filter_sum", "by_ts>filter_sum")) - .get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - InternalTimeSeries timeSeries = aggregations.get("by_ts"); - Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); - assertThat( - timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), - equalTo(filteredData.keySet()) - ); - for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { - @SuppressWarnings("unchecked") - Map key = (Map) bucket.getKey(); - assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); - } - SimpleValue obj = aggregations.get("total_filter_sum"); - assertThat(obj.value(), closeTo(sumByMetric(filteredData, "metric_" + metric), obj.value() * 0.0001)); + assertNoFailuresAndResponse( + prepareSearch("index").setQuery(queryBuilder) + .setSize(0) + .addAggregation(timeSeries("by_ts").subAggregation(sum("filter_sum").field("metric_" + metric))) + .addAggregation(global("everything").subAggregation(sum("all_sum").field("metric_" + metric))) + .addAggregation(PipelineAggregatorBuilders.sumBucket("total_filter_sum", "by_ts>filter_sum")), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + InternalTimeSeries timeSeries = aggregations.get("by_ts"); + Map, Map>> filteredData = dataFilteredByDimension("dim_" + dim, val, include); + assertThat( + timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), + equalTo(filteredData.keySet()) + ); + for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { + @SuppressWarnings("unchecked") + Map key = (Map) bucket.getKey(); + assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); + } + SimpleValue obj = aggregations.get("total_filter_sum"); + assertThat(obj.value(), closeTo(sumByMetric(filteredData, "metric_" + metric), obj.value() * 0.0001)); - Global global = aggregations.get("everything"); - Sum allSum = global.getAggregations().get("all_sum"); - assertThat(allSum.value(), closeTo(sumByMetric(data, "metric_" + metric), allSum.value() * 0.0001)); + Global global = aggregations.get("everything"); + Sum allSum = global.getAggregations().get("all_sum"); + 
assertThat(allSum.value(), closeTo(sumByMetric(data, "metric_" + metric), allSum.value() * 0.0001)); + } + ); ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch("index") - .setQuery(QueryBuilders.termQuery("dim_" + dim, val)) + () -> prepareSearch("index").setQuery(QueryBuilders.termQuery("dim_" + dim, val)) .setSize(0) .addAggregation(global("everything").subAggregation(timeSeries("by_ts"))) .get() @@ -346,25 +350,29 @@ public void testStandAloneTimeSeriesAggWithMetricFilter() { } else { queryBuilder.lte(val); } - SearchResponse response = client().prepareSearch("index") - .setQuery(queryBuilder) - .setSize(0) - .addAggregation(timeSeries("by_ts")) - .get(); - assertSearchResponse(response); - Aggregations aggregations = response.getAggregations(); - assertNotNull(aggregations); - InternalTimeSeries timeSeries = aggregations.get("by_ts"); - Map, Map>> filteredData = dataFilteredByMetric(data, "metric_" + metric, val, above); - assertThat( - timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), - equalTo(filteredData.keySet()) + assertNoFailuresAndResponse( + prepareSearch("index").setQuery(queryBuilder).setSize(0).addAggregation(timeSeries("by_ts")), + response -> { + Aggregations aggregations = response.getAggregations(); + assertNotNull(aggregations); + InternalTimeSeries timeSeries = aggregations.get("by_ts"); + Map, Map>> filteredData = dataFilteredByMetric( + data, + "metric_" + metric, + val, + above + ); + assertThat( + timeSeries.getBuckets().stream().map(MultiBucketsAggregation.Bucket::getKey).collect(Collectors.toSet()), + equalTo(filteredData.keySet()) + ); + for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { + @SuppressWarnings("unchecked") + Map key = (Map) bucket.getKey(); + assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); + } + } ); - for (InternalTimeSeries.Bucket bucket : timeSeries.getBuckets()) { - @SuppressWarnings("unchecked") - Map key = (Map) bucket.getKey(); - assertThat(bucket.getDocCount(), equalTo((long) filteredData.get(key).size())); - } } public void testRetrievingHits() { @@ -381,8 +389,7 @@ public void testRetrievingHits() { int expectedSize = count(filteredData); ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch("index") - .setQuery(queryBuilder) + () -> prepareSearch("index").setQuery(queryBuilder) .setSize(expectedSize * 2) .addAggregation(timeSeries("by_ts").subAggregation(topHits("hits").size(100))) .addAggregation(topHits("top_hits").size(100)) // top level top hits @@ -492,7 +499,7 @@ public void testGetHitsFailure() throws Exception { .put("time_series.end_time", "2022-01-01T00:00:00Z") .put("number_of_shards", 1) .build() - ).setMapping("key", "type=keyword,time_series_dimension=true", "val", "type=double").get() + ).setMapping("key", "type=keyword,time_series_dimension=true", "val", "type=double") ); client().prepareBulk() @@ -516,13 +523,9 @@ public void testGetHitsFailure() throws Exception { QueryBuilder queryBuilder = QueryBuilders.rangeQuery("@timestamp").lte("2021-01-01T00:10:00Z"); assertNoFailures( - client().prepareSearch("test") - .setQuery(queryBuilder) - .setSize(10) - .addSort("key", SortOrder.ASC) - .addSort("@timestamp", SortOrder.ASC) + prepareSearch("test").setQuery(queryBuilder).setSize(10).addSort("key", SortOrder.ASC).addSort("@timestamp", SortOrder.ASC) ); - 
assertNoFailures(client().prepareSearch("test").setQuery(queryBuilder).setSize(10).addAggregation(timeSeries("by_ts"))); + assertNoFailures(prepareSearch("test").setQuery(queryBuilder).setSize(10).addAggregation(timeSeries("by_ts"))); assertAcked(indicesAdmin().delete(new DeleteIndexRequest("test")).actionGet()); } diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesNestedAggregationsIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesNestedAggregationsIT.java index 0eba8cd5ace14..7fddc65ac3e03 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesNestedAggregationsIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/bucket/TimeSeriesNestedAggregationsIT.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.aggregations.AggregationIntegTestCase; import org.elasticsearch.aggregations.bucket.timeseries.InternalTimeSeries; @@ -42,6 +41,8 @@ import java.util.TreeSet; import java.util.function.Supplier; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; + public class TimeSeriesNestedAggregationsIT extends AggregationIntegTestCase { private static int numberOfDimensions; private static int numberOfDocuments; @@ -155,48 +156,56 @@ private static String formatDim(int dimId) { public void testTimeSeriesAggregation() { final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts"); - final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get(); - final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0); - assertTimeSeriesAggregation(ts); + assertResponse(prepareSearch("index").addAggregation(timeSeries).setSize(0), response -> { + final InternalTimeSeries ts = (InternalTimeSeries) response.getAggregations().asList().get(0); + assertTimeSeriesAggregation(ts); + }); } public void testSumByTsid() { final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation( new SumAggregationBuilder("sum").field("gauge_metric") ); - final SearchResponse searchResponse = client().prepareSearch("index").setQuery(new MatchAllQueryBuilder()).get(); - assertNotEquals(numberOfDocuments, searchResponse.getHits().getHits().length); - final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get(); - final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0); - assertTimeSeriesAggregation(ts); + assertResponse( + prepareSearch("index").setQuery(new MatchAllQueryBuilder()), + response -> assertNotEquals(numberOfDocuments, response.getHits().getHits().length) + ); + + assertResponse(prepareSearch("index").addAggregation(timeSeries).setSize(0), response -> { + final InternalTimeSeries ts = (InternalTimeSeries) response.getAggregations().asList().get(0); + assertTimeSeriesAggregation(ts); + }); } public void testTermsByTsid() { final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation( new 
TermsAggregationBuilder("terms").field("dim_0") ); - final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get(); - final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0); - assertTimeSeriesAggregation(ts); + assertResponse(prepareSearch("index").addAggregation(timeSeries).setSize(0), response -> { + final InternalTimeSeries ts = (InternalTimeSeries) response.getAggregations().asList().get(0); + assertTimeSeriesAggregation(ts); + }); } public void testDateHistogramByTsid() { final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation( new DateHistogramAggregationBuilder("date_histogram").field("@timestamp").calendarInterval(DateHistogramInterval.HOUR) ); - final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get(); - final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0); - assertTimeSeriesAggregation(ts); + assertResponse(prepareSearch("index").addAggregation(timeSeries).setSize(0), response -> { + final InternalTimeSeries ts = (InternalTimeSeries) response.getAggregations().asList().get(0); + assertTimeSeriesAggregation(ts); + }); } public void testCardinalityByTsid() { final TimeSeriesAggregationBuilder timeSeries = new TimeSeriesAggregationBuilder("ts").subAggregation( new CardinalityAggregationBuilder("dim_n_cardinality").field(formatDim(numberOfDimensions - 1)) ); - final SearchResponse aggregationResponse = client().prepareSearch("index").addAggregation(timeSeries).setSize(0).get(); - final InternalTimeSeries ts = (InternalTimeSeries) aggregationResponse.getAggregations().asList().get(0); - assertTimeSeriesAggregation(ts); - ts.getBuckets().forEach(bucket -> { assertCardinality(bucket.getAggregations().get("dim_n_cardinality"), 1); }); + assertResponse(prepareSearch("index").addAggregation(timeSeries).setSize(0), response -> { + final InternalTimeSeries ts = (InternalTimeSeries) response.getAggregations().asList().get(0); + assertTimeSeriesAggregation(ts); + ts.getBuckets().forEach(bucket -> { assertCardinality(bucket.getAggregations().get("dim_n_cardinality"), 1); }); + }); } private static void assertTimeSeriesAggregation(final InternalTimeSeries timeSeriesAggregation) { diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java index 628863387b157..14bae46e1e00f 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/DateDerivativeIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.aggregations.AggregationsPlugin; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateFormatters; @@ -39,7 +38,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -116,95 +115,93 @@ public void afterEachTest() throws IOException { } public void testSingleValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(3)); - - ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(1L)); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, nullValue()); - - key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(1); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(2L)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(1d)); - - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(2); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(1d)); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(1L)); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, nullValue()); + + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(2L)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(1d)); + + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + 
assertThat(docCountDeriv.value(), equalTo(1d)); + } + ); } public void testSingleValuedFieldNormalised() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.DAY)) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(3)); - - ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); - assertThat(bucket, notNullValue()); - assertThat(bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(1L)); - Derivative docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, nullValue()); - - key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(1); - assertThat(bucket, notNullValue()); - assertThat(bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(2L)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), closeTo(1d, 0.00001)); - assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 31d, 0.00001)); - - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(2); - assertThat(bucket, notNullValue()); - assertThat(bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), closeTo(1d, 0.00001)); - assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 29d, 0.00001)); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(1L)); + Derivative docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, nullValue()); + + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(2L)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), closeTo(1d, 0.00001)); + assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 31d, 0.00001)); + + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat(bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), closeTo(1d, 0.00001)); + 
assertThat(docCountDeriv.normalizedValue(), closeTo(1d / 29d, 0.00001)); + } + ); } /** @@ -225,45 +222,44 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstStart() throws Excep indexRandom(true, builders); ensureSearchable(); - SearchResponse response = client().prepareSearch(IDX_DST_START) - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch(IDX_DST_START).addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.DAY) .timeZone(timezone) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR)) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(4)); - - DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd"); - ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-03-24")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - - ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-03-25")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); - - // the following is normalized using a 23h bucket width - ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-03-26")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d); - - ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-03-27")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + + DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd"); + ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-03-24")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); + + ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-03-25")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); + + // the following is normalized using a 23h bucket width + ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-03-26")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 23d); + + ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-03-27")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); + } + ); } /** @@ -283,46 +279,45 @@ public void testSingleValuedFieldNormalised_timeZone_CET_DstEnd() throws Excepti indexRandom(true, builders); 
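The unit(...) argument on the derivative builder is what the normalizedValue assertions in these hunks exercise: the pipeline divides each bucket-over-bucket difference by the bucket's actual width expressed in that unit, so the CET day shortened by the DST switch above normalizes by 23 hours, the lengthened day below by 25, and the 1986 Asia/Kathmandu offset change further down leaves a 105-minute bucket. A small self-contained check of that arithmetic; the class and method names are for illustration only:

    // Worked check of the normalization that assertBucket(...) expects:
    // normalizedValue = derivative / (bucket width measured in the chosen unit).
    public final class DerivativeNormalizationCheck {

        static double normalizedValue(double derivative, double bucketWidthInUnits) {
            return derivative / bucketWidthInUnits;
        }

        public static void main(String[] args) {
            // The doc-count derivative between consecutive buckets in these tests is 1.
            System.out.println(normalizedValue(1d, 24d));  // ordinary day, HOUR unit: 1/24
            System.out.println(normalizedValue(1d, 23d));  // CET DST-start day has 23 hours: 1/23
            System.out.println(normalizedValue(1d, 25d));  // CET DST-end day has 25 hours: 1/25
            System.out.println(normalizedValue(1d, 105d)); // Kathmandu's +5:30 to +5:45 shift leaves a 105-minute bucket, MINUTE unit
        }
    }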
ensureSearchable(); - SearchResponse response = client().prepareSearch(IDX_DST_END) - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch(IDX_DST_END).addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.DAY) .timeZone(timezone) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.HOUR)) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(4)); - - DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd").withZone(ZoneOffset.UTC); - - ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-10-27")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - - ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-10-28")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); - - // the following is normalized using a 25h bucket width - ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-10-29")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d); - - ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-10-30")) - .atStartOfDay(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + + DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd").withZone(ZoneOffset.UTC); + + ZonedDateTime expectedKeyFirstBucket = LocalDate.from(dateFormatter.parse("2012-10-27")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); + + ZonedDateTime expectedKeySecondBucket = LocalDate.from(dateFormatter.parse("2012-10-28")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 24d); + + // the following is normalized using a 25h bucket width + ZonedDateTime expectedKeyThirdBucket = LocalDate.from(dateFormatter.parse("2012-10-29")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 25d); + + ZonedDateTime expectedKeyFourthBucket = LocalDate.from(dateFormatter.parse("2012-10-30")) + .atStartOfDay(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 24d); + } + ); } /** @@ -343,46 +338,45 @@ public void testSingleValuedFieldNormalised_timeZone_AsiaKathmandu() throws Exce indexRandom(true, builders); ensureSearchable(); - SearchResponse response = client().prepareSearch(IDX_DST_KATHMANDU) - .addAggregation( + assertNoFailuresAndResponse( + 
prepareSearch(IDX_DST_KATHMANDU).addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.HOUR) .timeZone(timezone) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count").unit(DateHistogramInterval.MINUTE)) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(4)); - - DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC); - - ZonedDateTime expectedKeyFirstBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")) - .atZone(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); - - ZonedDateTime expectedKeySecondBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")) - .atZone(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 60d); - - // the following is normalized using a 105min bucket width - ZonedDateTime expectedKeyThirdBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")) - .atZone(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 105d); - - ZonedDateTime expectedKeyFourthBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")) - .atZone(timezone) - .withZoneSameInstant(ZoneOffset.UTC); - assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 60d); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + + DateFormatter dateFormatter = DateFormatter.forPattern("uuuu-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC); + + ZonedDateTime expectedKeyFirstBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T22:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(0), expectedKeyFirstBucket, 1L, nullValue(), null, null); + + ZonedDateTime expectedKeySecondBucket = LocalDateTime.from(dateFormatter.parse("1985-12-31T23:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(1), expectedKeySecondBucket, 2L, notNullValue(), 1d, 1d / 60d); + + // the following is normalized using a 105min bucket width + ZonedDateTime expectedKeyThirdBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T01:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(2), expectedKeyThirdBucket, 3L, notNullValue(), 1d, 1d / 105d); + + ZonedDateTime expectedKeyFourthBucket = LocalDateTime.from(dateFormatter.parse("1986-01-01T02:00:00")) + .atZone(timezone) + .withZoneSameInstant(ZoneOffset.UTC); + assertBucket(buckets.get(3), expectedKeyFourthBucket, 4L, notNullValue(), 1d, 1d / 60d); + } + ); } private static void addNTimes(int amount, String index, ZonedDateTime dateTime, List builders) throws Exception { @@ -411,211 +405,206 @@ private static void assertBucket( } public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + 
assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(sum("sum").field("value")) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "sum")) - ) - .get(); - - assertSearchResponse(response); - - Histogram histo = response.getAggregations().get("histo"); - assertThat(histo, notNullValue()); - assertThat(histo.getName(), equalTo("histo")); - List buckets = histo.getBuckets(); - assertThat(buckets.size(), equalTo(3)); - Object[] propertiesKeys = (Object[]) ((InternalAggregation) histo).getProperty("_key"); - Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) histo).getProperty("_count"); - Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); - - ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(1L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - Sum sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - assertThat(sum.value(), equalTo(1.0)); - SimpleValue deriv = bucket.getAggregations().get("deriv"); - assertThat(deriv, nullValue()); - assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key)); - assertThat((long) propertiesDocCounts[0], equalTo(1L)); - assertThat((double) propertiesCounts[0], equalTo(1.0)); - - key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(1); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(2L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - assertThat(sum.value(), equalTo(5.0)); - deriv = bucket.getAggregations().get("deriv"); - assertThat(deriv, notNullValue()); - assertThat(deriv.value(), equalTo(4.0)); - assertThat( - ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( - "histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList() - ), - equalTo(4.0) - ); - assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key)); - assertThat((long) propertiesDocCounts[1], equalTo(2L)); - assertThat((double) propertiesCounts[1], equalTo(5.0)); - - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(2); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - sum = bucket.getAggregations().get("sum"); - assertThat(sum, notNullValue()); - assertThat(sum.value(), equalTo(15.0)); - deriv = bucket.getAggregations().get("deriv"); - assertThat(deriv, notNullValue()); - assertThat(deriv.value(), equalTo(10.0)); - assertThat( - ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( - "histo", - AggregationPath.parse("deriv.value").getPathElementsAsStringList() ), - equalTo(10.0) + response -> { + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + Object[] propertiesKeys = (Object[]) 
((InternalAggregation) histo).getProperty("_key"); + Object[] propertiesDocCounts = (Object[]) ((InternalAggregation) histo).getProperty("_count"); + Object[] propertiesCounts = (Object[]) ((InternalAggregation) histo).getProperty("sum.value"); + + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(1L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.value(), equalTo(1.0)); + SimpleValue deriv = bucket.getAggregations().get("deriv"); + assertThat(deriv, nullValue()); + assertThat((ZonedDateTime) propertiesKeys[0], equalTo(key)); + assertThat((long) propertiesDocCounts[0], equalTo(1L)); + assertThat((double) propertiesCounts[0], equalTo(1.0)); + + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(2L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.value(), equalTo(5.0)); + deriv = bucket.getAggregations().get("deriv"); + assertThat(deriv, notNullValue()); + assertThat(deriv.value(), equalTo(4.0)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo(4.0) + ); + assertThat((ZonedDateTime) propertiesKeys[1], equalTo(key)); + assertThat((long) propertiesDocCounts[1], equalTo(2L)); + assertThat((double) propertiesCounts[1], equalTo(5.0)); + + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + assertThat(sum.value(), equalTo(15.0)); + deriv = bucket.getAggregations().get("deriv"); + assertThat(deriv, notNullValue()); + assertThat(deriv.value(), equalTo(10.0)); + assertThat( + ((InternalMultiBucketAggregation.InternalBucket) bucket).getProperty( + "histo", + AggregationPath.parse("deriv.value").getPathElementsAsStringList() + ), + equalTo(10.0) + ); + assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key)); + assertThat((long) propertiesDocCounts[2], equalTo(3L)); + assertThat((double) propertiesCounts[2], equalTo(15.0)); + } ); - assertThat((ZonedDateTime) propertiesKeys[2], equalTo(key)); - assertThat((long) propertiesDocCounts[2], equalTo(3L)); - assertThat((double) propertiesCounts[2], equalTo(15.0)); } public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( dateHistogram("histo").field("dates") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - 
assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(4)); - - ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(1L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, nullValue()); - - key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(1); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(2.0)); - - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(2); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(5L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(2.0)); - - key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(3); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(-2.0)); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(4)); + + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(1L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, nullValue()); + + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(2.0)); + + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(5L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + docCountDeriv = bucket.getAggregations().get("deriv"); + 
assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(2.0)); + + key = ZonedDateTime.of(2012, 4, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(3); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(-2.0)); + } + ); } public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx_unmapped").addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - assertThat(deriv.getBuckets().size(), equalTo(0)); + ), + response -> { + Histogram deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + assertThat(deriv.getBuckets().size(), equalTo(0)); + } + ); } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx", "idx_unmapped").addAggregation( dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) .minDocCount(0) .subAggregation(new DerivativePipelineAggregationBuilder("deriv", "_count")) - ) - .get(); - - assertSearchResponse(response); - - Histogram deriv = response.getAggregations().get("histo"); - assertThat(deriv, notNullValue()); - assertThat(deriv.getName(), equalTo("histo")); - List buckets = deriv.getBuckets(); - assertThat(buckets.size(), equalTo(3)); - - ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); - Histogram.Bucket bucket = buckets.get(0); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(1L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); - SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, nullValue()); - - key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(1); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(2L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(1.0)); - - key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); - bucket = buckets.get(2); - assertThat(bucket, notNullValue()); - assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); - assertThat(bucket.getDocCount(), equalTo(3L)); - assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); - docCountDeriv = bucket.getAggregations().get("deriv"); - assertThat(docCountDeriv, notNullValue()); - assertThat(docCountDeriv.value(), equalTo(1.0)); + ), + response -> { + Histogram 
deriv = response.getAggregations().get("histo"); + assertThat(deriv, notNullValue()); + assertThat(deriv.getName(), equalTo("histo")); + List buckets = deriv.getBuckets(); + assertThat(buckets.size(), equalTo(3)); + + ZonedDateTime key = ZonedDateTime.of(2012, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + Histogram.Bucket bucket = buckets.get(0); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(1L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(true)); + SimpleValue docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, nullValue()); + + key = ZonedDateTime.of(2012, 2, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(1); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(2L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(1.0)); + + key = ZonedDateTime.of(2012, 3, 1, 0, 0, 0, 0, ZoneOffset.UTC); + bucket = buckets.get(2); + assertThat(bucket, notNullValue()); + assertThat((ZonedDateTime) bucket.getKey(), equalTo(key)); + assertThat(bucket.getDocCount(), equalTo(3L)); + assertThat(bucket.getAggregations().asList().isEmpty(), is(false)); + docCountDeriv = bucket.getAggregations().get("deriv"); + assertThat(docCountDeriv, notNullValue()); + assertThat(docCountDeriv.value(), equalTo(1.0)); + } + ); } - } diff --git a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java index 324716f0f25ce..e0c91689b333d 100644 --- a/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java +++ b/modules/aggregations/src/internalClusterTest/java/org/elasticsearch/aggregations/pipeline/SerialDiffIT.java @@ -9,7 +9,6 @@ package org.elasticsearch.aggregations.pipeline; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.aggregations.AggregationIntegTestCase; import org.elasticsearch.common.collect.EvictingQueue; import org.elasticsearch.common.util.Maps; @@ -31,7 +30,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.max; import static org.elasticsearch.search.aggregations.AggregationBuilders.min; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.diff; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -220,59 +219,56 @@ private void setupExpected(MetricTarget target) { } public void testBasicDiff() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( + assertNoFailuresAndResponse( + prepareSearch("idx").addAggregation( histogram("histo").field(INTERVAL_FIELD) .interval(interval) .extendedBounds(0L, (long) (interval * (numBuckets - 1))) .subAggregation(metric) .subAggregation(diff("diff_counts", "_count").lag(lag).gapPolicy(gapPolicy)) 
.subAggregation(diff("diff_values", "the_metric").lag(lag).gapPolicy(gapPolicy)) - ) - .get(); - - assertSearchResponse(response); - - Histogram histo = response.getAggregations().get("histo"); - assertThat(histo, notNullValue()); - assertThat(histo.getName(), equalTo("histo")); - List buckets = histo.getBuckets(); - assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size())); - - List expectedCounts = testValues.get(MetricTarget.COUNT.toString()); - List expectedValues = testValues.get(MetricTarget.VALUE.toString()); - - Iterator actualIter = buckets.iterator(); - Iterator expectedBucketIter = mockHisto.iterator(); - Iterator expectedCountsIter = expectedCounts.iterator(); - Iterator expectedValuesIter = expectedValues.iterator(); - - while (actualIter.hasNext()) { - assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); - - Histogram.Bucket actual = actualIter.next(); - PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); - Double expectedCount = expectedCountsIter.next(); - Double expectedValue = expectedValuesIter.next(); - - assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); - assertThat("doc counts do not match", actual.getDocCount(), equalTo((long) expected.count)); - - assertBucketContents(actual, expectedCount, expectedValue); - } + ), + response -> { + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat("Size of buckets array is not correct.", buckets.size(), equalTo(mockHisto.size())); + + List expectedCounts = testValues.get(MetricTarget.COUNT.toString()); + List expectedValues = testValues.get(MetricTarget.VALUE.toString()); + + Iterator actualIter = buckets.iterator(); + Iterator expectedBucketIter = mockHisto.iterator(); + Iterator expectedCountsIter = expectedCounts.iterator(); + Iterator expectedValuesIter = expectedValues.iterator(); + + while (actualIter.hasNext()) { + assertValidIterators(expectedBucketIter, expectedCountsIter, expectedValuesIter); + + Histogram.Bucket actual = actualIter.next(); + PipelineAggregationHelperTests.MockBucket expected = expectedBucketIter.next(); + Double expectedCount = expectedCountsIter.next(); + Double expectedValue = expectedValuesIter.next(); + + assertThat("keys do not match", ((Number) actual.getKey()).longValue(), equalTo(expected.key)); + assertThat("doc counts do not match", actual.getDocCount(), equalTo((long) expected.count)); + + assertBucketContents(actual, expectedCount, expectedValue); + } + } + ); } public void testInvalidLagSize() { try { - client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(INTERVAL_FIELD) - .interval(interval) - .extendedBounds(0L, (long) (interval * (numBuckets - 1))) - .subAggregation(metric) - .subAggregation(diff("diff_counts", "_count").lag(-1).gapPolicy(gapPolicy)) - ) - .get(); + prepareSearch("idx").addAggregation( + histogram("histo").field(INTERVAL_FIELD) + .interval(interval) + .extendedBounds(0L, (long) (interval * (numBuckets - 1))) + .subAggregation(metric) + .subAggregation(diff("diff_counts", "_count").lag(-1).gapPolicy(gapPolicy)) + ).get(); } catch (IllegalArgumentException e) { assertThat(e.getMessage(), is("[lag] must be a positive integer: [diff_counts]")); } diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml 
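The conversion repeated through DerivativeIT and SerialDiffIT above replaces the collect-then-assert idiom, SearchResponse response = client().prepareSearch(...).get() followed by assertSearchResponse(response), with a callback helper: the assertions run inside a consumer while the test framework still owns the response and can release it when the consumer returns. A minimal sketch of the new shape, with an illustrative index and field name:

    import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
    import static org.hamcrest.Matchers.notNullValue;

    import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
    import org.elasticsearch.test.ESIntegTestCase;

    public class CallbackAssertionSketchIT extends ESIntegTestCase {
        public void testHistogramShape() {
            // Fails on any shard failure, then hands the open response to the consumer.
            assertNoFailuresAndResponse(
                prepareSearch("idx").addAggregation(histogram("histo").field("value").interval(1)),
                response -> {
                    Histogram histo = response.getAggregations().get("histo");
                    assertThat(histo, notNullValue());
                }
            );
        }
    }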
b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml index e02b5fdc8b3cf..25522264c4dc0 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/adjacency_matrix.yml @@ -66,8 +66,6 @@ setup: - skip: version: " - 7.8.99" reason: fixed in 7.9.0 - features: node_selector - - do: indices.create: index: lookup @@ -91,8 +89,6 @@ setup: { "num": [4] } - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: index: test preference: hit-same-shard-copy @@ -145,8 +141,6 @@ setup: # The second request should hit the cache - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: index: test preference: hit-same-shard-copy diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml index e68aa621f1544..7d173ce9511bb 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filter.yml @@ -24,12 +24,7 @@ setup: --- "Terms lookup gets cached": - - skip: - features: node_selector - - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 @@ -63,8 +58,6 @@ setup: - match: { indices.test.total.request_cache.miss_count: 1 } - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 @@ -99,12 +92,7 @@ setup: --- "Standard queries get cached": - - skip: - features: node_selector - - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 @@ -137,8 +125,6 @@ setup: # Try again - it'll cache - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 diff --git a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml index 0053d22f05a80..da3010ad8437a 100644 --- a/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml +++ b/modules/aggregations/src/yamlRestTest/resources/rest-api-spec/test/aggregations/filters_bucket.yml @@ -393,7 +393,6 @@ null meta: - skip: version: " - 7.10.99" reason: cache fixed in 7.11.0 - features: node_selector - do: bulk: @@ -407,8 +406,6 @@ null meta: string_field: foo bar - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: index: test_1 body: @@ -440,8 +437,6 @@ null meta: # This should be entirely fresh because updating the mapping busted the cache. - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. 
search: index: test_1 body: @@ -512,9 +507,6 @@ nested: --- "cache hits": - - skip: - features: node_selector - - do: indices.create: index: test @@ -539,8 +531,6 @@ nested: {"mentions" : ["abc"]} - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 @@ -579,8 +569,6 @@ nested: - match: { indices.test.total.request_cache.miss_count: 1 } - do: - node_selector: - version: current # the version of the node that parsed the request is part of the cache key. search: rest_total_hits_as_int: true size: 0 diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/QueryStringWithAnalyzersIT.java index fa6f08337233e..32e20aea3c2e1 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/QueryStringWithAnalyzersIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/QueryStringWithAnalyzersIT.java @@ -53,8 +53,7 @@ public void testCustomWordDelimiterQueryString() { refresh(); assertHitCount( - client().prepareSearch("test") - .setQuery(queryStringQuery("foo.baz").defaultOperator(Operator.AND).field("field1").field("field2")), + prepareSearch("test").setQuery(queryStringQuery("foo.baz").defaultOperator(Operator.AND).field("field1").field("field2")), 1L ); } diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java index 857d16b8cdc89..a9ffdb60419f9 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java @@ -83,8 +83,8 @@ private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, I client().prepareIndex("test").setId("1").setSource("field", "foo").get(); assertNoFailures(indicesAdmin().prepareRefresh("test").execute().actionGet()); - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L); - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")), 0L); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "buzz")), 0L); Response analyzeResponse = indicesAdmin().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); assertEquals(2, analyzeResponse.getTokens().size()); assertEquals("foo", analyzeResponse.getTokens().get(0).getTerm()); @@ -124,9 +124,9 @@ private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, I assertTrue(tokens.contains(testTerm)); } - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L); long expectedHitCount = preview ? 
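Alongside the node_selector cleanups in the YAML tests, the Java tests in these hunks drop the client(). prefix in favor of the prepareSearch(...) convenience inherited from the test base class; it wraps the same client().prepareSearch(...) call, so the change is purely mechanical. In one line, using names from the hunks above:

    // Same assertion as before, routed through the inherited helper.
    assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")), 1L);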
0L : 1L; - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)), expectedHitCount); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)), expectedHitCount); } } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index dd2fbe8a19bab..90a8d3379775f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -99,19 +99,13 @@ import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; @@ -120,21 +114,15 @@ import org.elasticsearch.index.analysis.PreConfiguredTokenizer; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import org.elasticsearch.lucene.analysis.miscellaneous.DisableGraphAttribute; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.synonyms.SynonymsManagementAPIService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.tartarus.snowball.ext.DutchStemmer; import org.tartarus.snowball.ext.FrenchStemmer; @@ -144,7 +132,6 @@ import java.util.List; import java.util.Map; import java.util.TreeMap; -import java.util.function.Supplier; import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings; @@ -156,24 +143,9 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri private final SetOnce synonymsManagementServiceHolder = new SetOnce<>(); @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - 
ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - this.scriptServiceHolder.set(scriptService); - this.synonymsManagementServiceHolder.set(new SynonymsManagementAPIService(client)); + public Collection createComponents(PluginServices services) { + this.scriptServiceHolder.set(services.scriptService()); + this.synonymsManagementServiceHolder.set(new SynonymsManagementAPIService(services.client())); return Collections.emptyList(); } @@ -260,7 +232,7 @@ public Map> getTokenFilters() { return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings) { @Override public TokenStream create(TokenStream tokenStream) { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + "Please change the filter name to [edge_ngram] instead." @@ -301,7 +273,7 @@ public TokenStream create(TokenStream tokenStream) { return new NGramTokenFilterFactory(indexSettings, environment, name, settings) { @Override public TokenStream create(TokenStream tokenStream) { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + "Please change the filter name to [ngram] instead." @@ -371,12 +343,12 @@ public Map> getTokenizers() { tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); tokenizers.put("thai", ThaiTokenizerFactory::new); tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [nGram] tokenizer name was deprecated in 7.6. " + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." ); - } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_6_0)) { + } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "nGram_tokenizer_deprecation", @@ -388,12 +360,12 @@ public Map> getTokenizers() { }); tokenizers.put("ngram", NGramTokenizerFactory::new); tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [edgeNGram] tokenizer name was deprecated in 7.6. " + "Please use the tokenizer name to [edge_nGram] for indices created in versions 8 or higher instead." 
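The CommonAnalysisPlugin hunk above shows the consolidated plugin entry point: the long positional parameter list of createComponents (Client, ClusterService, ThreadPool, and so on) collapses into a single PluginServices argument that exposes the same services through accessors. A hedged sketch of a migrated plugin (MyPlugin and the stored service are illustrative):

    import java.util.Collection;
    import java.util.Collections;

    import org.apache.lucene.util.SetOnce;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.script.ScriptService;

    public class MyPlugin extends Plugin {
        private final SetOnce<ScriptService> scriptService = new SetOnce<>();

        @Override
        public Collection<?> createComponents(PluginServices services) {
            // Dependencies that used to arrive as positional parameters are
            // now fetched from the carrier object.
            scriptService.set(services.scriptService());
            return Collections.emptyList();
        }
    }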
); - } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_6_0)) { + } else if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "edgeNGram_tokenizer_deprecation", @@ -588,7 +560,7 @@ public List getPreConfiguredTokenFilters() { ) ); filters.add(PreConfiguredTokenFilter.indexVersion("word_delimiter_graph", false, false, (input, version) -> { - boolean adjustOffsets = version.onOrAfter(IndexVersion.V_7_3_0); + boolean adjustOffsets = version.onOrAfter(IndexVersions.V_7_3_0); return new WordDelimiterGraphFilter( input, adjustOffsets, @@ -613,7 +585,7 @@ public List getPreConfiguredTokenizers() { tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.indexVersion("edge_ngram", (version) -> { - if (version.onOrAfter(IndexVersion.V_7_3_0)) { + if (version.onOrAfter(IndexVersions.V_7_3_0)) { return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); } return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); @@ -626,12 +598,12 @@ public List getPreConfiguredTokenizers() { // Temporary shim for aliases. TODO deprecate after they are moved tokenizers.add(PreConfiguredTokenizer.indexVersion("nGram", (version) -> { - if (version.onOrAfter(IndexVersion.V_8_0_0)) { + if (version.onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [nGram] tokenizer name was deprecated in 7.6. " + "Please use the tokenizer name to [ngram] for indices created in versions 8 or higher instead." ); - } else if (version.onOrAfter(IndexVersion.V_7_6_0)) { + } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "nGram_tokenizer_deprecation", @@ -642,12 +614,12 @@ public List getPreConfiguredTokenizers() { return new NGramTokenizer(); })); tokenizers.add(PreConfiguredTokenizer.indexVersion("edgeNGram", (version) -> { - if (version.onOrAfter(IndexVersion.V_8_0_0)) { + if (version.onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "The [edgeNGram] tokenizer name was deprecated in 7.6. " + "Please use the tokenizer name to [edge_ngram] for indices created in versions 8 or higher instead." ); - } else if (version.onOrAfter(IndexVersion.V_7_6_0)) { + } else if (version.onOrAfter(IndexVersions.V_7_6_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "edgeNGram_tokenizer_deprecation", @@ -655,7 +627,7 @@ public List getPreConfiguredTokenizers() { + "Please change the tokenizer name to [edge_ngram] instead." 
); } - if (version.onOrAfter(IndexVersion.V_7_3_0)) { + if (version.onOrAfter(IndexVersions.V_7_3_0)) { return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); } return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index 38ce33dde9a01..c18cb3dddf0ae 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -32,7 +33,7 @@ public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -56,7 +57,7 @@ public void testNGramFilterInCustomAnalyzerDeprecationError() throws IOException .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_6_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -81,7 +82,7 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -105,7 +106,7 @@ public void testEdgeNGramFilterInCustomAnalyzerDeprecationError() throws IOExcep .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_6_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_6_0) ) .put("index.analysis.analyzer.custom_analyzer.type", "custom") .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard") @@ -131,13 +132,13 @@ public void testNGramTokenizerDeprecation() throws IOException { doTestPrebuiltTokenizerDeprecation( "nGram", "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_5_2), + 
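The other mechanical rename running through this file and the tests below moves the named historical constants (V_7_3_0, V_7_6_0, V_8_0_0, ...) from IndexVersion onto a dedicated IndexVersions holder, while IndexVersion keeps the instance-level operations such as current(), onOrAfter() and max(). In sketch form:

    import org.elasticsearch.index.IndexSettings;
    import org.elasticsearch.index.IndexVersion;
    import org.elasticsearch.index.IndexVersions;

    class VersionCheckSketch {
        // Named constants now live on IndexVersions ...
        static boolean createdOnOrAfter8(IndexSettings indexSettings) {
            return indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_8_0_0);
        }

        // ... while comparisons and the moving "current" stay on IndexVersion.
        static IndexVersion upperBound() {
            return IndexVersion.max(IndexVersions.V_7_6_0, IndexVersion.current());
        }
    }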
IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), false ); doTestPrebuiltTokenizerDeprecation( "edgeNGram", "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_5_2), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), false ); doTestPrebuiltTokenizerDeprecation( @@ -145,8 +146,8 @@ public void testNGramTokenizerDeprecation() throws IOException { "ngram", IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_6_0, - IndexVersion.max(IndexVersion.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0)) + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) ), true ); @@ -155,8 +156,8 @@ public void testNGramTokenizerDeprecation() throws IOException { "edge_ngram", IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_6_0, - IndexVersion.max(IndexVersion.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0)) + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) ), true ); @@ -165,7 +166,7 @@ public void testNGramTokenizerDeprecation() throws IOException { () -> doTestPrebuiltTokenizerDeprecation( "nGram", "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ) ); @@ -174,7 +175,7 @@ public void testNGramTokenizerDeprecation() throws IOException { () -> doTestPrebuiltTokenizerDeprecation( "edgeNGram", "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ) ); @@ -183,13 +184,13 @@ public void testNGramTokenizerDeprecation() throws IOException { doTestCustomTokenizerDeprecation( "nGram", "ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_5_2), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), false ); doTestCustomTokenizerDeprecation( "edgeNGram", "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_5_2), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2), false ); doTestCustomTokenizerDeprecation( @@ -197,8 +198,8 @@ public void testNGramTokenizerDeprecation() throws IOException { "ngram", IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_6_0, - IndexVersion.max(IndexVersion.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0)) + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) ), true ); @@ -207,8 +208,8 @@ public void testNGramTokenizerDeprecation() throws IOException { "edge_ngram", IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_6_0, - IndexVersion.max(IndexVersion.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0)) + IndexVersions.V_7_6_0, + IndexVersion.max(IndexVersions.V_7_6_0, IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0)) ), true ); @@ -217,7 +218,7 @@ public void testNGramTokenizerDeprecation() throws IOException { () -> doTestCustomTokenizerDeprecation( "nGram", "ngram", - 
IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ) ); @@ -226,7 +227,7 @@ public void testNGramTokenizerDeprecation() throws IOException { () -> doTestCustomTokenizerDeprecation( "edgeNGram", "edge_ngram", - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ) ); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java index 7f522d55addc7..412e3ba3e380a 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.indices.analysis.AnalysisModule; @@ -51,8 +52,8 @@ public void testPreConfiguredTokenizer() throws IOException { { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_7_3_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) ); try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edge_ngram")) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); @@ -65,8 +66,8 @@ public void testPreConfiguredTokenizer() throws IOException { { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_7_3_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_7_3_0) ); try (IndexAnalyzers indexAnalyzers = buildAnalyzers(version, "edgeNGram")) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); @@ -90,8 +91,8 @@ public void testPreConfiguredTokenizer() throws IOException { IndexAnalyzers indexAnalyzers = buildAnalyzers( IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_3_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_3_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ), "edgeNGram" ) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index d7e4dbf4c0c01..e7e9aa32b1684 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -114,8 +114,7 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException { ); client().prepareIndex("test").setId("1").setSource("name", "ARCOTEL Hotels Deutschland").get(); refresh(); - SearchResponse search = client().prepareSearch("test") - .setQuery(matchQuery("name.autocomplete", "deut 
tel").operator(Operator.OR)) + SearchResponse search = prepareSearch("test").setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR)) .highlighter(new HighlightBuilder().field("name.autocomplete")) .get(); assertHighlight(search, 0, "name.autocomplete", 0, equalTo("ARCOTEL Hotels Deutschland")); @@ -159,26 +158,21 @@ public void testMultiPhraseCutoff() throws IOException { ) .get(); refresh(); - SearchResponse search = client().prepareSearch() - .setQuery(matchPhraseQuery("body", "Test: http://www.facebook.com ")) + SearchResponse search = prepareSearch().setQuery(matchPhraseQuery("body", "Test: http://www.facebook.com ")) .highlighter(new HighlightBuilder().field("body").highlighterType("fvh")) .get(); assertHighlight(search, 0, "body", 0, startsWith("Test: http://www.facebook.com")); - search = client().prepareSearch() - .setQuery( - matchPhraseQuery( - "body", - "Test: http://www.facebook.com " - + "http://elasticsearch.org http://xing.com http://cnn.com " - + "http://quora.com http://twitter.com this is a test for highlighting " - + "feature Test: http://www.facebook.com http://elasticsearch.org " - + "http://xing.com http://cnn.com http://quora.com http://twitter.com this " - + "is a test for highlighting feature" - ) + search = prepareSearch().setQuery( + matchPhraseQuery( + "body", + "Test: http://www.facebook.com " + + "http://elasticsearch.org http://xing.com http://cnn.com " + + "http://quora.com http://twitter.com this is a test for highlighting " + + "feature Test: http://www.facebook.com http://elasticsearch.org " + + "http://xing.com http://cnn.com http://quora.com http://twitter.com this " + + "is a test for highlighting feature" ) - .highlighter(new HighlightBuilder().field("body").highlighterType("fvh")) - .execute() - .actionGet(); + ).highlighter(new HighlightBuilder().field("body").highlighterType("fvh")).execute().actionGet(); assertHighlight( search, 0, diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java index 7b60c1a64abb6..3a519f594a57f 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PredicateTokenScriptFilterTests.java @@ -24,11 +24,11 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.ThreadPool; @@ -36,6 +36,9 @@ import java.io.IOException; import java.util.Collections; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class PredicateTokenScriptFilterTests extends ESTokenStreamTestCase { public void testSimpleFilter() throws IOException { @@ -57,9 +60,9 @@ public boolean execute(Token token) { } }; - @SuppressWarnings("unchecked") ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), 
Collections.emptyMap(), () -> 1L) { @Override + @SuppressWarnings("unchecked") public FactoryType compile(Script script, ScriptContext context) { assertEquals(context, AnalysisPredicateScript.CONTEXT); assertEquals(new Script("my_script"), script); @@ -67,23 +70,13 @@ public FactoryType compile(Script script, ScriptContext FactoryType compile(Script script, ScriptContext FactoryType compile(Script script, ScriptContext createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider unused, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { final APMTracer apmTracer = telemetryProvider.get().getTracer(); - apmTracer.setClusterName(clusterService.getClusterName().value()); - apmTracer.setNodeName(clusterService.getNodeName()); + apmTracer.setClusterName(services.clusterService().getClusterName().value()); + apmTracer.setNodeName(services.clusterService().getNodeName()); final APMAgentSettings apmAgentSettings = new APMAgentSettings(); apmAgentSettings.syncAgentSystemProperties(settings); - apmAgentSettings.addClusterSettingsListeners(clusterService, telemetryProvider.get()); - - final APMMeter apmMeter = telemetryProvider.get().getMeter(); + final APMMeterService apmMeter = new APMMeterService(settings); + apmAgentSettings.addClusterSettingsListeners(services.clusterService(), telemetryProvider.get(), apmMeter); return List.of(apmTracer, apmMeter); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java similarity index 74% rename from modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java rename to modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java index 92d7d692f0ea5..57649f7e3dfa6 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/Instruments.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java @@ -6,31 +6,43 @@ * Side Public License, v 1. 
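The Mockito imports added to PredicateTokenScriptFilterTests hint at how unit tests cope with the consolidated entry point: instead of passing a dozen nulls, they mock the PluginServices carrier and stub only the accessor the plugin actually reads. Roughly, with the surrounding test wiring elided:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    CommonAnalysisPlugin plugin = new CommonAnalysisPlugin();
    Plugin.PluginServices services = mock(Plugin.PluginServices.class);
    when(services.scriptService()).thenReturn(scriptService); // scriptService built earlier by the test
    plugin.createComponents(services);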
*/ -package org.elasticsearch.telemetry.apm.internal.metrics; +package org.elasticsearch.telemetry.apm; import io.opentelemetry.api.metrics.Meter; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.telemetry.apm.internal.metrics.DoubleCounterAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.DoubleGaugeAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.DoubleHistogramAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.DoubleUpDownCounterAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.LongCounterAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.LongGaugeAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.LongHistogramAdapter; +import org.elasticsearch.telemetry.apm.internal.metrics.LongUpDownCounterAdapter; import org.elasticsearch.telemetry.metric.DoubleCounter; import org.elasticsearch.telemetry.metric.DoubleGauge; import org.elasticsearch.telemetry.metric.DoubleHistogram; import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; import org.elasticsearch.telemetry.metric.LongCounter; import org.elasticsearch.telemetry.metric.LongGauge; import org.elasticsearch.telemetry.metric.LongHistogram; import org.elasticsearch.telemetry.metric.LongUpDownCounter; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; import java.util.List; import java.util.Map; import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; /** * Container for registering and fetching instruments by type and name. * Instrument names must be unique for a given type on registration. * {@link #setProvider(Meter)} is used to change the provider for all existing instruments.
*/ -public class Instruments { +public class APMMeterRegistry implements MeterRegistry { private final Registrar doubleCounters = new Registrar<>(); private final Registrar doubleUpDownCounters = new Registrar<>(); private final Registrar doubleGauges = new Registrar<>(); @@ -42,7 +54,7 @@ public class Instruments { private final Meter meter; - public Instruments(Meter meter) { + public APMMeterRegistry(Meter meter) { this.meter = meter; } @@ -60,82 +72,98 @@ public Instruments(Meter meter) { // Access to registration has to be restricted when the provider is updated in ::setProvider protected final ReleasableLock registerLock = new ReleasableLock(new ReentrantLock()); - public DoubleCounter registerDoubleCounter(String name, String description, String unit) { + @Override + public DoubleCounter registerDoubleCounter(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return doubleCounters.register(new DoubleCounterAdapter(meter, name, description, unit)); } } + @Override public DoubleCounter getDoubleCounter(String name) { return doubleCounters.get(name); } - public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { + @Override + public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return doubleUpDownCounters.register(new DoubleUpDownCounterAdapter(meter, name, description, unit)); } } + @Override public DoubleUpDownCounter getDoubleUpDownCounter(String name) { return doubleUpDownCounters.get(name); } - public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + @Override + public DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer) { try (ReleasableLock lock = registerLock.acquire()) { - return doubleGauges.register(new DoubleGaugeAdapter(meter, name, description, unit)); + return doubleGauges.register(new DoubleGaugeAdapter(meter, name, description, unit, observer)); } } + @Override public DoubleGauge getDoubleGauge(String name) { return doubleGauges.get(name); } - public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { + @Override + public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return doubleHistograms.register(new DoubleHistogramAdapter(meter, name, description, unit)); } } + @Override public DoubleHistogram getDoubleHistogram(String name) { return doubleHistograms.get(name); } - public LongCounter registerLongCounter(String name, String description, String unit) { + @Override + public LongCounter registerLongCounter(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return longCounters.register(new LongCounterAdapter(meter, name, description, unit)); } } + @Override public LongCounter getLongCounter(String name) { return longCounters.get(name); } - public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { + @Override + public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return longUpDownCounters.register(new LongUpDownCounterAdapter(meter, name, description, unit)); } } + @Override public LongUpDownCounter getLongUpDownCounter(String name) { return longUpDownCounters.get(name); } - public 
LongGauge registerLongGauge(String name, String description, String unit) { + @Override + public LongGauge registerLongGauge(String name, String description, String unit, Supplier observer) { try (ReleasableLock lock = registerLock.acquire()) { - return longGauges.register(new LongGaugeAdapter(meter, name, description, unit)); + return longGauges.register(new LongGaugeAdapter(meter, name, description, unit, observer)); } } + @Override public LongGauge getLongGauge(String name) { return longGauges.get(name); } - public LongHistogram registerLongHistogram(String name, String description, String unit) { + @Override + public LongHistogram registerLongHistogram(String name, String description, String unit) { try (ReleasableLock lock = registerLock.acquire()) { return longHistograms.register(new LongHistogramAdapter(meter, name, description, unit)); } } + @Override public LongHistogram getLongHistogram(String name) { return longHistograms.get(name); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java similarity index 92% rename from modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java rename to modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java index d3d485f52bc49..01f65eb60aa74 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/AbstractInstrument.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/AbstractInstrument.java @@ -6,7 +6,7 @@ * Side Public License, v 1. */ -package org.elasticsearch.telemetry.apm.internal.metrics; +package org.elasticsearch.telemetry.apm; import io.opentelemetry.api.metrics.Meter; @@ -50,11 +50,11 @@ public String getUnit() { return unit.toString(); } - T getInstrument() { + protected T getInstrument() { return delegate.get(); } - String getDescription() { + protected String getDescription() { return description; } @@ -62,5 +62,5 @@ void setProvider(@Nullable Meter meter) { delegate.set(doBuildInstrument(Objects.requireNonNull(meter))); } - abstract T buildInstrument(Meter meter); + protected abstract T buildInstrument(Meter meter); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index e4a194ebe0172..41816318a3586 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -17,7 +17,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; import java.security.AccessController; @@ -48,17 +47,20 @@ public class APMAgentSettings { "true" ); - public void addClusterSettingsListeners(ClusterService clusterService, APMTelemetryProvider apmTelemetryProvider) { + public void addClusterSettingsListeners( + ClusterService clusterService, + APMTelemetryProvider apmTelemetryProvider, + APMMeterService apmMeterService + ) { final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final APMTracer apmTracer = apmTelemetryProvider.getTracer(); - final APMMeter apmMeter = 
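Note the gauge signatures in the registry above: registration now takes an observer Supplier, so gauges are polled by the agent rather than set by the caller. A sketch against the new API, assuming LongWithAttributes pairs a value with an attribute map (the metric name and the queue are illustrative):

    import java.util.Map;
    import java.util.concurrent.ConcurrentLinkedQueue;

    import io.opentelemetry.api.OpenTelemetry;

    import org.elasticsearch.telemetry.apm.APMMeterRegistry;
    import org.elasticsearch.telemetry.metric.LongGauge;
    import org.elasticsearch.telemetry.metric.LongWithAttributes;

    APMMeterRegistry meterRegistry = new APMMeterRegistry(OpenTelemetry.noop().getMeter("noop"));
    ConcurrentLinkedQueue<Runnable> pending = new ConcurrentLinkedQueue<>();
    LongGauge depth = meterRegistry.registerLongGauge(
        "es.sketch.pending.depth",
        "Number of pending tasks",
        "count",
        () -> new LongWithAttributes(pending.size(), Map.of())
    );
    // No set() or increment() calls afterwards: the agent invokes the supplier
    // on each collection interval.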
apmTelemetryProvider.getMeter(); clusterSettings.addSettingsUpdateConsumer(APM_ENABLED_SETTING, enabled -> { apmTracer.setEnabled(enabled); this.setAgentSetting("instrument", Boolean.toString(enabled)); }); clusterSettings.addSettingsUpdateConsumer(TELEMETRY_METRICS_ENABLED_SETTING, enabled -> { - apmMeter.setEnabled(enabled); + apmMeterService.setEnabled(enabled); // The agent records data other than spans, e.g. JVM metrics, so we toggle this setting in order to // minimise its impact to a running Elasticsearch. this.setAgentSetting("recording", Boolean.toString(enabled)); diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java new file mode 100644 index 0000000000000..21f0b8491f644 --- /dev/null +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMMeterService.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm.internal; + +import io.opentelemetry.api.GlobalOpenTelemetry; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.APMMeterRegistry; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.function.Supplier; + +public class APMMeterService extends AbstractLifecycleComponent { + private final APMMeterRegistry meterRegistry; + + private final Supplier otelMeterSupplier; + private final Supplier noopMeterSupplier; + + protected volatile boolean enabled; + + public APMMeterService(Settings settings) { + this(settings, APMMeterService.otelMeter(), APMMeterService.noopMeter()); + } + + public APMMeterService(Settings settings, Supplier otelMeterSupplier, Supplier noopMeterSupplier) { + this(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.get(settings), otelMeterSupplier, noopMeterSupplier); + } + + public APMMeterService(boolean enabled, Supplier otelMeterSupplier, Supplier noopMeterSupplier) { + this.enabled = enabled; + this.otelMeterSupplier = otelMeterSupplier; + this.noopMeterSupplier = noopMeterSupplier; + this.meterRegistry = new APMMeterRegistry(enabled ? 
createOtelMeter() : createNoopMeter()); + } + + public APMMeterRegistry getMeterRegistry() { + return meterRegistry; + } + + /** + * @see APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider, APMMeterService) + */ + void setEnabled(boolean enabled) { + this.enabled = enabled; + if (enabled) { + meterRegistry.setProvider(createOtelMeter()); + } else { + meterRegistry.setProvider(createNoopMeter()); + } + } + + @Override + protected void doStart() {} + + @Override + protected void doStop() { + meterRegistry.setProvider(createNoopMeter()); + } + + @Override + protected void doClose() {} + + protected Meter createOtelMeter() { + assert this.enabled; + return AccessController.doPrivileged((PrivilegedAction) otelMeterSupplier::get); + } + + protected Meter createNoopMeter() { + return noopMeterSupplier.get(); + } + + protected static Supplier noopMeter() { + return () -> OpenTelemetry.noop().getMeter("noop"); + } + + // to be used within doPrivileged block + private static Supplier otelMeter() { + var openTelemetry = GlobalOpenTelemetry.get(); + var meter = openTelemetry.getMeter("elasticsearch"); + return () -> meter; + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java index ae9d91cc6ec51..5b78c2f5f6a3c 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMTelemetryProvider.java @@ -10,18 +10,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.telemetry.apm.internal.metrics.APMMeter; +import org.elasticsearch.telemetry.apm.APMMeterRegistry; import org.elasticsearch.telemetry.apm.internal.tracing.APMTracer; public class APMTelemetryProvider implements TelemetryProvider { private final Settings settings; private final APMTracer apmTracer; - private final APMMeter apmMeter; + private final APMMeterService apmMeterService; public APMTelemetryProvider(Settings settings) { this.settings = settings; apmTracer = new APMTracer(settings); - apmMeter = new APMMeter(settings); + apmMeterService = new APMMeterService(settings); } @Override @@ -30,7 +30,7 @@ public APMTracer getTracer() { } @Override - public APMMeter getMeter() { - return apmMeter; + public APMMeterRegistry getMeterRegistry() { + return apmMeterService.getMeterRegistry(); } } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java deleted file mode 100644 index 0a8d425579ca2..0000000000000 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeter.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
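The new APMMeterService above separates lifecycle from the instrument API that the deleted APMMeter (below) used to combine: it owns one long-lived APMMeterRegistry and swaps the underlying OpenTelemetry Meter between the real and the noop implementation as telemetry is toggled, so instruments registered while metrics are disabled keep working once they are enabled. Constructing it directly, as a test might, with noop meters on both sides:

    import java.util.function.Supplier;

    import io.opentelemetry.api.OpenTelemetry;
    import io.opentelemetry.api.metrics.Meter;

    import org.elasticsearch.telemetry.apm.APMMeterRegistry;
    import org.elasticsearch.telemetry.apm.internal.APMMeterService;

    Supplier<Meter> noop = () -> OpenTelemetry.noop().getMeter("noop");
    APMMeterService meterService = new APMMeterService(false, noop, noop);
    APMMeterRegistry registry = meterService.getMeterRegistry();
    // setEnabled(boolean) is package-private; in production it is driven by the
    // cluster-settings listener wired in APMAgentSettings#addClusterSettingsListeners.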
- */ - -package org.elasticsearch.telemetry.apm.internal.metrics; - -import io.opentelemetry.api.GlobalOpenTelemetry; -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.Meter; - -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.telemetry.apm.internal.APMTelemetryProvider; -import org.elasticsearch.telemetry.metric.DoubleCounter; -import org.elasticsearch.telemetry.metric.DoubleGauge; -import org.elasticsearch.telemetry.metric.DoubleHistogram; -import org.elasticsearch.telemetry.metric.DoubleUpDownCounter; -import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.LongGauge; -import org.elasticsearch.telemetry.metric.LongHistogram; -import org.elasticsearch.telemetry.metric.LongUpDownCounter; - -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.util.function.Supplier; - -import static org.elasticsearch.telemetry.apm.internal.APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING; - -public class APMMeter extends AbstractLifecycleComponent implements org.elasticsearch.telemetry.metric.Meter { - private final Instruments instruments; - - private final Supplier otelMeterSupplier; - private final Supplier noopMeterSupplier; - - private volatile boolean enabled; - - public APMMeter(Settings settings) { - this(settings, APMMeter.otelMeter(), APMMeter.noopMeter()); - } - - public APMMeter(Settings settings, Supplier otelMeterSupplier, Supplier noopMeterSupplier) { - this.enabled = TELEMETRY_METRICS_ENABLED_SETTING.get(settings); - this.otelMeterSupplier = otelMeterSupplier; - this.noopMeterSupplier = noopMeterSupplier; - this.instruments = new Instruments(enabled ? 
createOtelMeter() : createNoopMeter()); - } - - /** - * @see org.elasticsearch.telemetry.apm.internal.APMAgentSettings#addClusterSettingsListeners(ClusterService, APMTelemetryProvider) - */ - public void setEnabled(boolean enabled) { - this.enabled = enabled; - if (enabled) { - instruments.setProvider(createOtelMeter()); - } else { - instruments.setProvider(createNoopMeter()); - } - } - - @Override - protected void doStart() {} - - @Override - protected void doStop() { - instruments.setProvider(createNoopMeter()); - } - - @Override - protected void doClose() {} - - @Override - public DoubleCounter registerDoubleCounter(String name, String description, String unit) { - return instruments.registerDoubleCounter(name, description, unit); - } - - @Override - public DoubleCounter getDoubleCounter(String name) { - return instruments.getDoubleCounter(name); - } - - @Override - public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) { - return instruments.registerDoubleUpDownCounter(name, description, unit); - } - - @Override - public DoubleUpDownCounter getDoubleUpDownCounter(String name) { - return instruments.getDoubleUpDownCounter(name); - } - - @Override - public DoubleGauge registerDoubleGauge(String name, String description, String unit) { - return instruments.registerDoubleGauge(name, description, unit); - } - - @Override - public DoubleGauge getDoubleGauge(String name) { - return instruments.getDoubleGauge(name); - } - - @Override - public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) { - return instruments.registerDoubleHistogram(name, description, unit); - } - - @Override - public DoubleHistogram getDoubleHistogram(String name) { - return instruments.getDoubleHistogram(name); - } - - @Override - public LongCounter registerLongCounter(String name, String description, String unit) { - return instruments.registerLongCounter(name, description, unit); - } - - @Override - public LongCounter getLongCounter(String name) { - return instruments.getLongCounter(name); - } - - @Override - public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) { - return instruments.registerLongUpDownCounter(name, description, unit); - } - - @Override - public LongUpDownCounter getLongUpDownCounter(String name) { - return instruments.getLongUpDownCounter(name); - } - - @Override - public LongGauge registerLongGauge(String name, String description, String unit) { - return instruments.registerLongGauge(name, description, unit); - } - - @Override - public LongGauge getLongGauge(String name) { - return instruments.getLongGauge(name); - } - - @Override - public LongHistogram registerLongHistogram(String name, String description, String unit) { - return instruments.registerLongHistogram(name, description, unit); - } - - @Override - public LongHistogram getLongHistogram(String name) { - return instruments.getLongHistogram(name); - } - - Meter createOtelMeter() { - assert this.enabled; - return AccessController.doPrivileged((PrivilegedAction) otelMeterSupplier::get); - } - - private Meter createNoopMeter() { - return noopMeterSupplier.get(); - } - - private static Supplier noopMeter() { - return () -> OpenTelemetry.noop().getMeter("noop"); - } - - // to be used within doPrivileged block - private static Supplier otelMeter() { - var openTelemetry = GlobalOpenTelemetry.get(); - var meter = openTelemetry.getMeter("elasticsearch"); - return () -> meter; - } - - // scope for testing - Instruments 
getInstruments() {
-        return instruments;
-    }
-}
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java
index b25ffdff5481b..faba8c2e3e67e 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleCounterAdapter.java
@@ -8,23 +8,24 @@

 package org.elasticsearch.telemetry.apm.internal.metrics;

+import io.opentelemetry.api.metrics.DoubleCounter;
 import io.opentelemetry.api.metrics.Meter;

+import org.elasticsearch.telemetry.apm.AbstractInstrument;
+
 import java.util.Map;
 import java.util.Objects;

 /**
  * DoubleCounterAdapter wraps an otel DoubleCounter
  */
-public class DoubleCounterAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.DoubleCounter>
-    implements
-        org.elasticsearch.telemetry.metric.DoubleCounter {
+public class DoubleCounterAdapter extends AbstractInstrument<DoubleCounter> implements org.elasticsearch.telemetry.metric.DoubleCounter {

     public DoubleCounterAdapter(Meter meter, String name, String description, String unit) {
         super(meter, name, description, unit);
     }

-    io.opentelemetry.api.metrics.DoubleCounter buildInstrument(Meter meter) {
+    protected io.opentelemetry.api.metrics.DoubleCounter buildInstrument(Meter meter) {
         return Objects.requireNonNull(meter)
             .counterBuilder(getName())
             .ofDoubles()
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java
index 54f33be21698b..faef8bd723fcf 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleGaugeAdapter.java
@@ -9,47 +9,57 @@

 package org.elasticsearch.telemetry.apm.internal.metrics;

 import io.opentelemetry.api.metrics.Meter;
+import io.opentelemetry.api.metrics.ObservableDoubleGauge;
+
+import org.elasticsearch.common.util.concurrent.ReleasableLock;
+import org.elasticsearch.telemetry.apm.AbstractInstrument;
+import org.elasticsearch.telemetry.metric.DoubleWithAttributes;

-import java.util.Collections;
-import java.util.Map;
 import java.util.Objects;
-import java.util.concurrent.atomic.AtomicReference;
+import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Supplier;

 /**
- * DoubleGaugeAdapter wraps an otel ObservableDoubleMeasurement
+ * DoubleGaugeAdapter wraps an otel ObservableDoubleGauge
  */
-public class DoubleGaugeAdapter extends AbstractInstrument<io.opentelemetry.api.metrics.ObservableDoubleGauge>
+public class DoubleGaugeAdapter extends AbstractInstrument<ObservableDoubleGauge>
     implements
         org.elasticsearch.telemetry.metric.DoubleGauge {
-    private final AtomicReference<ValueWithAttributes> valueWithAttributes;
+    private final Supplier<DoubleWithAttributes> observer;
+    private final ReleasableLock closedLock = new ReleasableLock(new ReentrantLock());
+    private boolean closed = false;

-    public DoubleGaugeAdapter(Meter meter, String name, String description, String unit) {
+    public DoubleGaugeAdapter(Meter meter, String name, String description, String unit, Supplier<DoubleWithAttributes> observer) {
         super(meter, name, description, unit);
-        this.valueWithAttributes = new AtomicReference<>(new ValueWithAttributes(0.0, Collections.emptyMap()));
+        this.observer = observer;
     }

     @Override
-    io.opentelemetry.api.metrics.ObservableDoubleGauge buildInstrument(Meter meter) {
+    protected
io.opentelemetry.api.metrics.ObservableDoubleGauge buildInstrument(Meter meter) { return Objects.requireNonNull(meter) .gaugeBuilder(getName()) .setDescription(getDescription()) .setUnit(getUnit()) .buildWithCallback(measurement -> { - var localValueWithAttributed = valueWithAttributes.get(); - measurement.record(localValueWithAttributed.value(), OtelHelper.fromMap(localValueWithAttributed.attributes())); + DoubleWithAttributes observation; + try { + observation = observer.get(); + } catch (RuntimeException err) { + assert false : "observer must not throw [" + err.getMessage() + "]"; + return; + } + measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); }); } @Override - public void record(double value) { - record(value, Collections.emptyMap()); + public void close() throws Exception { + try (ReleasableLock lock = closedLock.acquire()) { + if (closed == false) { + getInstrument().close(); + } + closed = true; + } } - - @Override - public void record(double value, Map attributes) { - this.valueWithAttributes.set(new ValueWithAttributes(value, attributes)); - } - - private record ValueWithAttributes(double value, Map attributes) {} } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java index 5fd1a8a189b0f..e126aa6af7cf0 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleHistogramAdapter.java @@ -8,15 +8,18 @@ package org.elasticsearch.telemetry.apm.internal.metrics; +import io.opentelemetry.api.metrics.DoubleHistogram; import io.opentelemetry.api.metrics.Meter; +import org.elasticsearch.telemetry.apm.AbstractInstrument; + import java.util.Map; import java.util.Objects; /** * DoubleHistogramAdapter wraps an otel DoubleHistogram */ -public class DoubleHistogramAdapter extends AbstractInstrument +public class DoubleHistogramAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.DoubleHistogram { @@ -25,7 +28,7 @@ public DoubleHistogramAdapter(Meter meter, String name, String description, Stri } @Override - io.opentelemetry.api.metrics.DoubleHistogram buildInstrument(Meter meter) { + protected io.opentelemetry.api.metrics.DoubleHistogram buildInstrument(Meter meter) { var builder = Objects.requireNonNull(meter).histogramBuilder(getName()); return builder.setDescription(getDescription()).setUnit(getUnit()).build(); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java index 9a2fc1b564766..a204627a04f1e 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/DoubleUpDownCounterAdapter.java @@ -8,15 +8,18 @@ package org.elasticsearch.telemetry.apm.internal.metrics; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; import io.opentelemetry.api.metrics.Meter; +import org.elasticsearch.telemetry.apm.AbstractInstrument; + import java.util.Map; import java.util.Objects; /** * DoubleUpDownCounterAdapter wraps an otel DoubleUpDownCounter */ -public class DoubleUpDownCounterAdapter extends AbstractInstrument 
+public class DoubleUpDownCounterAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.DoubleUpDownCounter { @@ -25,7 +28,7 @@ public DoubleUpDownCounterAdapter(Meter meter, String name, String description, } @Override - io.opentelemetry.api.metrics.DoubleUpDownCounter buildInstrument(Meter meter) { + protected io.opentelemetry.api.metrics.DoubleUpDownCounter buildInstrument(Meter meter) { return Objects.requireNonNull(meter) .upDownCounterBuilder(getName()) .ofDoubles() diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java index 122d16d9e1aa4..9b46b8c97994a 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongCounterAdapter.java @@ -8,24 +8,25 @@ package org.elasticsearch.telemetry.apm.internal.metrics; +import io.opentelemetry.api.metrics.LongCounter; import io.opentelemetry.api.metrics.Meter; +import org.elasticsearch.telemetry.apm.AbstractInstrument; + import java.util.Map; import java.util.Objects; /** * LongCounterAdapter wraps an otel LongCounter */ -public class LongCounterAdapter extends AbstractInstrument - implements - org.elasticsearch.telemetry.metric.LongCounter { +public class LongCounterAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.LongCounter { public LongCounterAdapter(Meter meter, String name, String description, String unit) { super(meter, name, description, unit); } @Override - io.opentelemetry.api.metrics.LongCounter buildInstrument(Meter meter) { + protected io.opentelemetry.api.metrics.LongCounter buildInstrument(Meter meter) { var builder = Objects.requireNonNull(meter).counterBuilder(getName()); return builder.setDescription(getDescription()).setUnit(getUnit()).build(); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java index 66d2287a765dc..e297ba7ee963a 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongGaugeAdapter.java @@ -9,48 +9,55 @@ package org.elasticsearch.telemetry.apm.internal.metrics; import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.ObservableLongGauge; + +import org.elasticsearch.common.util.concurrent.ReleasableLock; +import org.elasticsearch.telemetry.apm.AbstractInstrument; +import org.elasticsearch.telemetry.metric.LongWithAttributes; -import java.util.Collections; -import java.util.Map; import java.util.Objects; -import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.locks.ReentrantLock; +import java.util.function.Supplier; /** - * LongGaugeAdapter wraps an otel ObservableLongMeasurement + * LongGaugeAdapter wraps an otel ObservableLongGauge */ -public class LongGaugeAdapter extends AbstractInstrument - implements - org.elasticsearch.telemetry.metric.LongGauge { - private final AtomicReference valueWithAttributes; +public class LongGaugeAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.LongGauge { + private final Supplier observer; + private final ReleasableLock closedLock = new ReleasableLock(new 
ReentrantLock()); + private boolean closed = false; - public LongGaugeAdapter(Meter meter, String name, String description, String unit) { + public LongGaugeAdapter(Meter meter, String name, String description, String unit, Supplier observer) { super(meter, name, description, unit); - this.valueWithAttributes = new AtomicReference<>(new ValueWithAttributes(0L, Collections.emptyMap())); + this.observer = observer; } @Override - io.opentelemetry.api.metrics.ObservableLongGauge buildInstrument(Meter meter) { - + protected io.opentelemetry.api.metrics.ObservableLongGauge buildInstrument(Meter meter) { return Objects.requireNonNull(meter) .gaugeBuilder(getName()) .ofLongs() .setDescription(getDescription()) .setUnit(getUnit()) .buildWithCallback(measurement -> { - var localValueWithAttributed = valueWithAttributes.get(); - measurement.record(localValueWithAttributed.value(), OtelHelper.fromMap(localValueWithAttributed.attributes())); + LongWithAttributes observation; + try { + observation = observer.get(); + } catch (RuntimeException err) { + assert false : "observer must not throw [" + err.getMessage() + "]"; + return; + } + measurement.record(observation.value(), OtelHelper.fromMap(observation.attributes())); }); } @Override - public void record(long value) { - record(value, Collections.emptyMap()); - } - - @Override - public void record(long value, Map attributes) { - this.valueWithAttributes.set(new ValueWithAttributes(value, attributes)); + public void close() throws Exception { + try (ReleasableLock lock = closedLock.acquire()) { + if (closed == false) { + getInstrument().close(); + } + closed = true; + } } - - private record ValueWithAttributes(long value, Map attributes) {} } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java index bb5be4866e7b7..2b8e76df0dd0e 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongHistogramAdapter.java @@ -8,24 +8,25 @@ package org.elasticsearch.telemetry.apm.internal.metrics; +import io.opentelemetry.api.metrics.LongHistogram; import io.opentelemetry.api.metrics.Meter; +import org.elasticsearch.telemetry.apm.AbstractInstrument; + import java.util.Map; import java.util.Objects; /** * LongHistogramAdapter wraps an otel LongHistogram */ -public class LongHistogramAdapter extends AbstractInstrument - implements - org.elasticsearch.telemetry.metric.LongHistogram { +public class LongHistogramAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.LongHistogram { public LongHistogramAdapter(Meter meter, String name, String description, String unit) { super(meter, name, description, unit); } @Override - io.opentelemetry.api.metrics.LongHistogram buildInstrument(Meter meter) { + protected io.opentelemetry.api.metrics.LongHistogram buildInstrument(Meter meter) { return Objects.requireNonNull(meter) .histogramBuilder(getName()) .ofLongs() diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java index e5af85e4ed192..a59a114bc2264 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java +++ 
b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/LongUpDownCounterAdapter.java @@ -8,15 +8,18 @@ package org.elasticsearch.telemetry.apm.internal.metrics; +import io.opentelemetry.api.metrics.LongUpDownCounter; import io.opentelemetry.api.metrics.Meter; +import org.elasticsearch.telemetry.apm.AbstractInstrument; + import java.util.Map; import java.util.Objects; /** * LongUpDownCounterAdapter wraps an otel LongUpDownCounter */ -public class LongUpDownCounterAdapter extends AbstractInstrument +public class LongUpDownCounterAdapter extends AbstractInstrument implements org.elasticsearch.telemetry.metric.LongUpDownCounter { @@ -25,7 +28,7 @@ public LongUpDownCounterAdapter(Meter meter, String name, String description, St } @Override - io.opentelemetry.api.metrics.LongUpDownCounter buildInstrument(Meter meter) { + protected io.opentelemetry.api.metrics.LongUpDownCounter buildInstrument(Meter meter) { var builder = Objects.requireNonNull(meter).upDownCounterBuilder(getName()); return builder.setDescription(getDescription()).setUnit(getUnit()).build(); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java index 673025a1a41f4..18bf66cee5391 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/metrics/OtelHelper.java @@ -23,8 +23,16 @@ static Attributes fromMap(Map attributes) { builder.put(k, value); } else if (v instanceof Long value) { builder.put(k, value); + } else if (v instanceof Integer value) { + builder.put(k, value); + } else if (v instanceof Byte value) { + builder.put(k, value); + } else if (v instanceof Short value) { + builder.put(k, value); } else if (v instanceof Double value) { builder.put(k, value); + } else if (v instanceof Float value) { + builder.put(k, value); } else if (v instanceof Boolean value) { builder.put(k, value); } else { diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java similarity index 65% rename from modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java rename to modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java index 1064b8820b089..38fb0f0e0a8ac 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/APMMeterTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/APMMeterRegistryTests.java @@ -6,61 +6,63 @@ * Side Public License, v 1. 
 */

-package org.elasticsearch.telemetry.apm.internal.metrics;
+package org.elasticsearch.telemetry.apm;

 import io.opentelemetry.api.OpenTelemetry;
 import io.opentelemetry.api.metrics.Meter;

 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.telemetry.apm.internal.APMAgentSettings;
+import org.elasticsearch.telemetry.apm.internal.APMMeterService;
+import org.elasticsearch.telemetry.apm.internal.TestAPMMeterService;
 import org.elasticsearch.telemetry.metric.DoubleCounter;
 import org.elasticsearch.test.ESTestCase;

 import static org.hamcrest.Matchers.sameInstance;

-public class APMMeterTests extends ESTestCase {
+public class APMMeterRegistryTests extends ESTestCase {
     Meter testOtel = OpenTelemetry.noop().getMeter("test");
     Meter noopOtel = OpenTelemetry.noop().getMeter("noop");

     public void testMeterIsSetUponConstruction() {
         // test default
-        APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel);
+        APMMeterService apmMeter = new APMMeterService(Settings.EMPTY, () -> testOtel, () -> noopOtel);

-        Meter meter = apmMeter.getInstruments().getMeter();
+        Meter meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(noopOtel));

         // test explicitly enabled
         var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build();
-        apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel);
+        apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel);

-        meter = apmMeter.getInstruments().getMeter();
+        meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(testOtel));

         // test explicitly disabled
         settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), false).build();
-        apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel);
+        apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel);

-        meter = apmMeter.getInstruments().getMeter();
+        meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(noopOtel));
     }

     public void testMeterIsOverridden() {
-        APMMeter apmMeter = new APMMeter(Settings.EMPTY, () -> testOtel, () -> noopOtel);
+        TestAPMMeterService apmMeter = new TestAPMMeterService(Settings.EMPTY, () -> testOtel, () -> noopOtel);

-        Meter meter = apmMeter.getInstruments().getMeter();
+        Meter meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(noopOtel));

         apmMeter.setEnabled(true);

-        meter = apmMeter.getInstruments().getMeter();
+        meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(testOtel));
     }

     public void testLookupByName() {
         var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build();

-        var apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel);
+        var apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel).getMeterRegistry();

         DoubleCounter registeredCounter = apmMeter.registerDoubleCounter("name", "desc", "unit");
         DoubleCounter lookedUpCounter = apmMeter.getDoubleCounter("name");
@@ -70,15 +72,15 @@ public void testLookupByName() {

     public void testNoopIsSetOnStop() {
         var settings = Settings.builder().put(APMAgentSettings.TELEMETRY_METRICS_ENABLED_SETTING.getKey(), true).build();
-        APMMeter apmMeter = new APMMeter(settings, () -> testOtel, () -> noopOtel);
+        APMMeterService apmMeter = new APMMeterService(settings, () -> testOtel, () -> noopOtel);
         apmMeter.start();

-        Meter meter = apmMeter.getInstruments().getMeter();
+        Meter meter =
apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(testOtel));

         apmMeter.stop();

-        meter = apmMeter.getInstruments().getMeter();
+        meter = apmMeter.getMeterRegistry().getMeter();
         assertThat(meter, sameInstance(noopOtel));
     }
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java
similarity index 86%
rename from modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java
rename to modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java
index 4390fd4ac0784..f18d39fb39c6c 100644
--- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsConcurrencyTests.java
+++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/MeterRegistryConcurrencyTests.java
@@ -6,7 +6,7 @@
  * Side Public License, v 1.
  */

-package org.elasticsearch.telemetry.apm.internal.metrics;
+package org.elasticsearch.telemetry.apm;

 import io.opentelemetry.api.OpenTelemetry;
 import io.opentelemetry.api.metrics.DoubleCounterBuilder;
@@ -27,7 +27,7 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.sameInstance;

-public class InstrumentsConcurrencyTests extends ESTestCase {
+public class MeterRegistryConcurrencyTests extends ESTestCase {
     private final String name = "name";
     private final String description = "desc";
     private final String unit = "kg";
@@ -91,26 +91,26 @@ public ObservableLongCounter buildWithCallback(Consumer<ObservableLongMeasurement> callback) {
-        var registerThread = new Thread(() -> instruments.registerLongCounter(name, description, unit));
+        var registerThread = new Thread(() -> meterRegistrar.registerLongCounter(name, description, unit));
         // registerThread has a countDown latch that is simulating a long-running registration
         registerThread.start();
         buildLatch.await();
         // wait for registerThread to hold the lock
-        var setProviderThread = new Thread(() -> instruments.setProvider(noopMeter));
+        var setProviderThread = new Thread(() -> meterRegistrar.setProvider(noopMeter));
         // a setProviderThread will attempt to override a meter, but will wait to acquireLock
         setProviderThread.start();
         // assert that a thread is waiting for a lock during long-running registration
         assertBusy(() -> assertThat(setProviderThread.getState(), equalTo(Thread.State.WAITING)));
         // assert that the old lockingMeter is still in place
-        assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(lockingMeter)));
+        assertBusy(() -> assertThat(meterRegistrar.getMeter(), sameInstance(lockingMeter)));
         // finish long-running registration
         registerLatch.countDown();
         // assert that a meter was overridden
-        assertBusy(() -> assertThat(instruments.getMeter(), sameInstance(lockingMeter)));
+        assertBusy(() -> assertThat(meterRegistrar.getMeter(), sameInstance(noopMeter)));
     }
 }
diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/RecordingOtelMeter.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/RecordingOtelMeter.java
new file mode 100644
index 0000000000000..6661653499f63
--- /dev/null
+++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/RecordingOtelMeter.java
@@ -0,0 +1,648 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements.
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.apm; + +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.metrics.DoubleCounter; +import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleGaugeBuilder; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; +import io.opentelemetry.api.metrics.DoubleUpDownCounter; +import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; +import io.opentelemetry.api.metrics.LongCounter; +import io.opentelemetry.api.metrics.LongCounterBuilder; +import io.opentelemetry.api.metrics.LongGaugeBuilder; +import io.opentelemetry.api.metrics.LongHistogram; +import io.opentelemetry.api.metrics.LongHistogramBuilder; +import io.opentelemetry.api.metrics.LongUpDownCounter; +import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; +import io.opentelemetry.api.metrics.Meter; +import io.opentelemetry.api.metrics.ObservableDoubleCounter; +import io.opentelemetry.api.metrics.ObservableDoubleGauge; +import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; +import io.opentelemetry.api.metrics.ObservableDoubleUpDownCounter; +import io.opentelemetry.api.metrics.ObservableLongCounter; +import io.opentelemetry.api.metrics.ObservableLongGauge; +import io.opentelemetry.api.metrics.ObservableLongMeasurement; +import io.opentelemetry.api.metrics.ObservableLongUpDownCounter; +import io.opentelemetry.context.Context; + +import org.elasticsearch.telemetry.InstrumentType; +import org.elasticsearch.telemetry.MetricRecorder; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.function.Consumer; + +public class RecordingOtelMeter implements Meter { + + Queue callbacks = new ConcurrentLinkedQueue<>(); + + public void collectMetrics() { + callbacks.forEach(Callback::doCall); + } + + public MetricRecorder getRecorder() { + return recorder; + } + + private final MetricRecorder recorder = new MetricRecorder<>(); + + @Override + public LongCounterBuilder counterBuilder(String name) { + return new RecordingLongCounterBuilder(name); + } + + @Override + public LongUpDownCounterBuilder upDownCounterBuilder(String name) { + return new RecordingLongUpDownBuilder(name); + } + + @Override + public DoubleHistogramBuilder histogramBuilder(String name) { + return new RecordingDoubleHistogramBuilder(name); + } + + @Override + public DoubleGaugeBuilder gaugeBuilder(String name) { + return new RecordingDoubleGaugeBuilder(name); + } + + // Counter + private class RecordingLongCounterBuilder extends AbstractBuilder implements LongCounterBuilder { + RecordingLongCounterBuilder(String name) { + super(name); + } + + @Override + public LongCounterBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public LongCounterBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public DoubleCounterBuilder ofDoubles() { + return new RecordingDoubleCounterBuilder(this); + } + + @Override + public LongCounter build() { + LongRecorder counter = new LongRecorder(name); + recorder.register(counter, counter.getInstrument(), name, description, unit); 
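+            // registering with the test MetricRecorder makes this instrument visible to tests, which can
+            // later look up the measurements recorded against it via getRecorder()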
+ return counter; + } + + @Override + public ObservableLongCounter buildWithCallback(Consumer callback) { + unimplemented(); + return null; + } + + @Override + public ObservableLongMeasurement buildObserver() { + unimplemented(); + return null; + } + } + + private class LongRecorder extends LongUpDownRecorder implements LongCounter, OtelInstrument { + LongRecorder(String name) { + super(name, InstrumentType.LONG_COUNTER); + } + + @Override + public void add(long value) { + assert value >= 0; + super.add(value); + } + + @Override + public void add(long value, Attributes attributes) { + assert value >= 0; + super.add(value, attributes); + } + + @Override + public void add(long value, Attributes attributes, Context context) { + assert value >= 0; + super.add(value, attributes, context); + } + } + + private class RecordingDoubleCounterBuilder extends AbstractBuilder implements DoubleCounterBuilder { + + RecordingDoubleCounterBuilder(AbstractBuilder other) { + super(other); + } + + @Override + public DoubleCounterBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public DoubleCounterBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public DoubleCounter build() { + DoubleRecorder counter = new DoubleRecorder(name); + recorder.register(counter, counter.getInstrument(), name, description, unit); + return counter; + } + + @Override + public ObservableDoubleCounter buildWithCallback(Consumer callback) { + unimplemented(); + return null; + } + + @Override + public ObservableDoubleMeasurement buildObserver() { + unimplemented(); + return null; + } + } + + private class DoubleRecorder extends DoubleUpDownRecorder implements DoubleCounter, OtelInstrument { + DoubleRecorder(String name) { + super(name, InstrumentType.DOUBLE_COUNTER); + } + + @Override + public void add(double value) { + assert value >= 0; + super.add(value); + } + + @Override + public void add(double value, Attributes attributes) { + assert value >= 0; + super.add(value, attributes); + } + + @Override + public void add(double value, Attributes attributes, Context context) { + assert value >= 0; + super.add(value, attributes, context); + } + } + + private class RecordingLongUpDownBuilder extends AbstractBuilder implements LongUpDownCounterBuilder { + RecordingLongUpDownBuilder(String name) { + super(name); + } + + @Override + public LongUpDownCounterBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public LongUpDownCounterBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public DoubleUpDownCounterBuilder ofDoubles() { + return new RecordingDoubleUpDownBuilder(this); + } + + @Override + public LongUpDownCounter build() { + LongUpDownRecorder counter = new LongUpDownRecorder(name); + recorder.register(counter, counter.getInstrument(), name, description, unit); + return counter; + } + + @Override + public ObservableLongUpDownCounter buildWithCallback(Consumer callback) { + unimplemented(); + return null; + } + + @Override + public ObservableLongMeasurement buildObserver() { + unimplemented(); + return null; + } + } + + private class LongUpDownRecorder extends AbstractInstrument implements LongUpDownCounter, OtelInstrument { + LongUpDownRecorder(String name) { + super(name, InstrumentType.LONG_UP_DOWN_COUNTER); + } + + protected LongUpDownRecorder(String name, InstrumentType instrument) { + // used by LongRecorder + super(name, 
instrument);
+        }
+
+        @Override
+        public void add(long value) {
+            recorder.call(instrument, name, value, null);
+        }
+
+        @Override
+        public void add(long value, Attributes attributes) {
+            recorder.call(instrument, name, value, toMap(attributes));
+        }
+
+        @Override
+        public void add(long value, Attributes attributes, Context context) {
+            unimplemented();
+        }
+    }
+
+    private class RecordingDoubleUpDownBuilder extends AbstractBuilder implements DoubleUpDownCounterBuilder {
+
+        RecordingDoubleUpDownBuilder(AbstractBuilder other) {
+            super(other);
+        }
+
+        @Override
+        public DoubleUpDownCounterBuilder setDescription(String description) {
+            innerSetDescription(description);
+            return this;
+        }
+
+        @Override
+        public DoubleUpDownCounterBuilder setUnit(String unit) {
+            innerSetUnit(unit);
+            return this;
+        }
+
+        @Override
+        public DoubleUpDownCounter build() {
+            DoubleUpDownRecorder counter = new DoubleUpDownRecorder(name);
+            recorder.register(counter, counter.getInstrument(), name, description, unit);
+            return counter;
+        }
+
+        @Override
+        public ObservableDoubleUpDownCounter buildWithCallback(Consumer<ObservableDoubleMeasurement> callback) {
+            unimplemented();
+            return null;
+        }
+
+        @Override
+        public ObservableDoubleMeasurement buildObserver() {
+            unimplemented();
+            return null;
+        }
+    }
+
+    private class DoubleUpDownRecorder extends AbstractInstrument implements DoubleUpDownCounter, OtelInstrument {
+        DoubleUpDownRecorder(String name) {
+            super(name, InstrumentType.DOUBLE_UP_DOWN_COUNTER);
+        }
+
+        protected DoubleUpDownRecorder(String name, InstrumentType instrument) {
+            // used by DoubleRecorder
+            super(name, instrument);
+        }
+
+        @Override
+        public void add(double value) {
+            recorder.call(instrument, name, value, null);
+        }
+
+        @Override
+        public void add(double value, Attributes attributes) {
+            recorder.call(instrument, name, value, toMap(attributes));
+        }
+
+        @Override
+        public void add(double value, Attributes attributes, Context context) {
+            unimplemented();
+        }
+    }
+
+    interface Callback {
+        void doCall();
+    }
+
+    abstract static class AbstractInstrument {
+        protected final String name;
+        protected final InstrumentType instrument;
+
+        AbstractInstrument(String name, InstrumentType instrument) {
+            this.name = name;
+            this.instrument = instrument;
+        }
+
+        public InstrumentType getInstrument() {
+            return instrument;
+        }
+
+        protected void unimplemented() {
+            throw new UnsupportedOperationException("unimplemented");
+        }
+
+        Map<String, Object> toMap(Attributes attributes) {
+            if (attributes == null) {
+                return null;
+            }
+            if (attributes.isEmpty()) {
+                return Collections.emptyMap();
+            }
+            Map<String, Object> map = new HashMap<>(attributes.size());
+            attributes.forEach((k, v) -> map.put(k.getKey(), v));
+            return map;
+        }
+    }
+
+    abstract static class AbstractBuilder {
+        protected final String name;
+        protected String description;
+        protected String unit;
+
+        AbstractBuilder(String name) {
+            this.name = name;
+        }
+
+        AbstractBuilder(AbstractBuilder other) {
+            this.name = other.name;
+            this.description = other.description;
+            this.unit = other.unit;
+        }
+
+        void innerSetDescription(String description) {
+            this.description = description;
+        }
+
+        void innerSetUnit(String unit) {
+            this.unit = unit;
+        }
+
+        protected void unimplemented() {
+            throw new UnsupportedOperationException("unimplemented");
+        }
+    }
+
+    interface OtelInstrument {}
+
+    // Gauges
+    private class RecordingDoubleGaugeBuilder extends AbstractBuilder implements DoubleGaugeBuilder {
+        RecordingDoubleGaugeBuilder(String name) {
+            super(name);
+        }
+
+        @Override
+        public DoubleGaugeBuilder
setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public DoubleGaugeBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public LongGaugeBuilder ofLongs() { + return new RecordingLongGaugeBuilder(this); + } + + @Override + public ObservableDoubleGauge buildWithCallback(Consumer callback) { + DoubleGaugeRecorder gauge = new DoubleGaugeRecorder(name, callback); + recorder.register(gauge, gauge.getInstrument(), name, description, unit); + callbacks.add(gauge); + return gauge; + } + + @Override + public ObservableDoubleMeasurement buildObserver() { + DoubleMeasurementRecorder measurement = new DoubleMeasurementRecorder(name); + recorder.register(measurement, measurement.getInstrument(), name, description, unit); + return measurement; + } + } + + private class DoubleGaugeRecorder extends AbstractInstrument implements ObservableDoubleGauge, Callback, OtelInstrument { + final Consumer callback; + + DoubleGaugeRecorder(String name, Consumer callback) { + super(name, InstrumentType.DOUBLE_GAUGE); + this.callback = callback; + } + + @Override + public void close() { + callbacks.remove(this); + } + + public void doCall() { + callback.accept(new DoubleMeasurementRecorder(name, instrument)); + } + } + + private class DoubleMeasurementRecorder extends AbstractInstrument implements ObservableDoubleMeasurement, OtelInstrument { + DoubleMeasurementRecorder(String name, InstrumentType instrument) { + super(name, instrument); + } + + DoubleMeasurementRecorder(String name) { + super(name, InstrumentType.DOUBLE_GAUGE); + } + + @Override + public void record(double value) { + recorder.call(instrument, name, value, null); + } + + @Override + public void record(double value, Attributes attributes) { + recorder.call(instrument, name, value, toMap(attributes)); + } + } + + private class RecordingLongGaugeBuilder extends AbstractBuilder implements LongGaugeBuilder { + RecordingLongGaugeBuilder(AbstractBuilder other) { + super(other); + } + + @Override + public LongGaugeBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public LongGaugeBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public ObservableLongGauge buildWithCallback(Consumer callback) { + LongGaugeRecorder gauge = new LongGaugeRecorder(name, callback); + recorder.register(gauge, gauge.getInstrument(), name, description, unit); + callbacks.add(gauge); + return gauge; + } + + @Override + public ObservableLongMeasurement buildObserver() { + LongMeasurementRecorder measurement = new LongMeasurementRecorder(name); + recorder.register(measurement, measurement.getInstrument(), name, description, unit); + return measurement; + } + } + + private class LongGaugeRecorder extends AbstractInstrument implements ObservableLongGauge, Callback, OtelInstrument { + final Consumer callback; + + LongGaugeRecorder(String name, Consumer callback) { + super(name, InstrumentType.LONG_GAUGE); + this.callback = callback; + } + + @Override + public void close() { + callbacks.remove(this); + } + + public void doCall() { + callback.accept(new LongMeasurementRecorder(name, instrument)); + } + } + + private class LongMeasurementRecorder extends AbstractInstrument implements ObservableLongMeasurement, OtelInstrument { + LongMeasurementRecorder(String name, InstrumentType instrument) { + super(name, instrument); + } + + LongMeasurementRecorder(String name) { + super(name, 
InstrumentType.LONG_GAUGE); + } + + @Override + public void record(long value) { + recorder.call(instrument, name, value, null); + } + + @Override + public void record(long value, Attributes attributes) { + recorder.call(instrument, name, value, toMap(attributes)); + } + } + + // Histograms + private class RecordingDoubleHistogramBuilder extends AbstractBuilder implements DoubleHistogramBuilder { + RecordingDoubleHistogramBuilder(String name) { + super(name); + } + + @Override + public DoubleHistogramBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public DoubleHistogramBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public LongHistogramBuilder ofLongs() { + return new RecordingLongHistogramBuilder(this); + } + + @Override + public DoubleHistogram build() { + return new DoubleHistogramRecorder(name); + } + } + + private class DoubleHistogramRecorder extends AbstractInstrument implements DoubleHistogram, OtelInstrument { + DoubleHistogramRecorder(String name) { + super(name, InstrumentType.DOUBLE_HISTOGRAM); + } + + @Override + public void record(double value) { + recorder.call(getInstrument(), name, value, null); + } + + @Override + public void record(double value, Attributes attributes) { + recorder.call(getInstrument(), name, value, toMap(attributes)); + } + + @Override + public void record(double value, Attributes attributes, Context context) { + unimplemented(); + } + } + + private class RecordingLongHistogramBuilder extends AbstractBuilder implements LongHistogramBuilder { + + RecordingLongHistogramBuilder(AbstractBuilder other) { + super(other); + } + + @Override + public LongHistogramBuilder setDescription(String description) { + innerSetDescription(description); + return this; + } + + @Override + public LongHistogramBuilder setUnit(String unit) { + innerSetUnit(unit); + return this; + } + + @Override + public LongHistogram build() { + return new LongHistogramRecorder(name); + } + } + + private class LongHistogramRecorder extends AbstractInstrument implements LongHistogram, OtelInstrument { + LongHistogramRecorder(String name) { + super(name, InstrumentType.LONG_HISTOGRAM); + } + + @Override + public void record(long value) { + recorder.call(getInstrument(), name, value, null); + } + + @Override + public void record(long value, Attributes attributes) { + recorder.call(getInstrument(), name, value, toMap(attributes)); + } + + @Override + public void record(long value, Attributes attributes, Context context) { + unimplemented(); + } + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/TestAPMMeterService.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/TestAPMMeterService.java new file mode 100644 index 0000000000000..bed611802d1e8 --- /dev/null +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/TestAPMMeterService.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.telemetry.apm.internal; + +import io.opentelemetry.api.metrics.Meter; + +import org.elasticsearch.common.settings.Settings; + +import java.util.function.Supplier; + +public class TestAPMMeterService extends APMMeterService { + public TestAPMMeterService(Settings settings, Supplier otelMeterSupplier, Supplier noopMeterSupplier) { + super(settings, otelMeterSupplier, noopMeterSupplier); + } + + public void setEnabled(boolean enabled) { + // expose pkg private for testing + super.setEnabled(enabled); + } +} diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java index 1e230eefe32dc..e8cd18521f842 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/GaugeAdapterTests.java @@ -8,116 +8,96 @@ package org.elasticsearch.telemetry.apm.internal.metrics; -import io.opentelemetry.api.common.Attributes; -import io.opentelemetry.api.metrics.DoubleGaugeBuilder; -import io.opentelemetry.api.metrics.LongGaugeBuilder; -import io.opentelemetry.api.metrics.Meter; -import io.opentelemetry.api.metrics.ObservableDoubleMeasurement; -import io.opentelemetry.api.metrics.ObservableLongMeasurement; - +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.apm.APMMeterRegistry; +import org.elasticsearch.telemetry.apm.RecordingOtelMeter; +import org.elasticsearch.telemetry.metric.DoubleGauge; +import org.elasticsearch.telemetry.metric.DoubleWithAttributes; +import org.elasticsearch.telemetry.metric.LongGauge; +import org.elasticsearch.telemetry.metric.LongWithAttributes; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import org.junit.Before; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; +import java.util.List; import java.util.Map; -import java.util.function.Consumer; +import java.util.concurrent.atomic.AtomicReference; -import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; public class GaugeAdapterTests extends ESTestCase { - Meter testMeter = Mockito.mock(Meter.class); - LongGaugeBuilder longGaugeBuilder = Mockito.mock(LongGaugeBuilder.class); - DoubleGaugeBuilder mockDoubleGaugeBuilder = Mockito.mock(DoubleGaugeBuilder.class); + RecordingOtelMeter otelMeter; + APMMeterRegistry registry; @Before public void init() { - when(longGaugeBuilder.setDescription(Mockito.anyString())).thenReturn(longGaugeBuilder); - when(longGaugeBuilder.setUnit(Mockito.anyString())).thenReturn(longGaugeBuilder); - - - when(mockDoubleGaugeBuilder.ofLongs()).thenReturn(longGaugeBuilder); - when(mockDoubleGaugeBuilder.setUnit(Mockito.anyString())).thenReturn(mockDoubleGaugeBuilder); - when(mockDoubleGaugeBuilder.setDescription(Mockito.anyString())).thenReturn(mockDoubleGaugeBuilder); - when(testMeter.gaugeBuilder(anyString())).thenReturn(mockDoubleGaugeBuilder); + otelMeter = new RecordingOtelMeter(); + registry = new APMMeterRegistry(otelMeter); } // testing that a value reported is then used in a callback - @SuppressWarnings("unchecked") - public void testLongGaugeRecord() { - LongGaugeAdapter longGaugeAdapter = new LongGaugeAdapter(testMeter, "name", "desc", "unit"); + 
public void testLongGaugeRecord() throws Exception { + AtomicReference attrs = new AtomicReference<>(); + LongGauge gauge = registry.registerLongGauge("name", "desc", "unit", attrs::get); - // recording a value - longGaugeAdapter.record(1L, Map.of("k", 1L)); + attrs.set(new LongWithAttributes(1L, Map.of("k", 1L))); - // upon metric export, the consumer will be called - ArgumentCaptor> captor = ArgumentCaptor.forClass(Consumer.class); - verify(longGaugeBuilder).buildWithCallback(captor.capture()); + otelMeter.collectMetrics(); - Consumer value = captor.getValue(); - // making sure that a consumer will fetch the value passed down upon recording of a value - TestLongMeasurement testLongMeasurement = new TestLongMeasurement(); - value.accept(testLongMeasurement); + List metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(1)); + assertThat(metrics.get(0).attributes(), equalTo(Map.of("k", 1L))); + assertThat(metrics.get(0).getLong(), equalTo(1L)); - assertThat(testLongMeasurement.value, Matchers.equalTo(1L)); - assertThat(testLongMeasurement.attributes, Matchers.equalTo(Attributes.builder().put("k", 1).build())); - } + attrs.set(new LongWithAttributes(2L, Map.of("k", 5L))); - // testing that a value reported is then used in a callback - @SuppressWarnings("unchecked") - public void testDoubleGaugeRecord() { - DoubleGaugeAdapter doubleGaugeAdapter = new DoubleGaugeAdapter(testMeter, "name", "desc", "unit"); + otelMeter.getRecorder().resetCalls(); + otelMeter.collectMetrics(); - // recording a value - doubleGaugeAdapter.record(1.0, Map.of("k", 1.0)); + metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(1)); + assertThat(metrics.get(0).attributes(), equalTo(Map.of("k", 5L))); + assertThat(metrics.get(0).getLong(), equalTo(2L)); - // upon metric export, the consumer will be called - ArgumentCaptor> captor = ArgumentCaptor.forClass(Consumer.class); - verify(mockDoubleGaugeBuilder).buildWithCallback(captor.capture()); + gauge.close(); - Consumer value = captor.getValue(); - // making sure that a consumer will fetch the value passed down upon recording of a value - TestDoubleMeasurement testLongMeasurement = new TestDoubleMeasurement(); - value.accept(testLongMeasurement); + otelMeter.getRecorder().resetCalls(); + otelMeter.collectMetrics(); - assertThat(testLongMeasurement.value, Matchers.equalTo(1.0)); - assertThat(testLongMeasurement.attributes, Matchers.equalTo(Attributes.builder().put("k", 1.0).build())); + metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(0)); } - private static class TestDoubleMeasurement implements ObservableDoubleMeasurement { - double value; - Attributes attributes; + // testing that a value reported is then used in a callback + public void testDoubleGaugeRecord() throws Exception { + AtomicReference attrs = new AtomicReference<>(); + DoubleGauge gauge = registry.registerDoubleGauge("name", "desc", "unit", attrs::get); - @Override - public void record(double value) { - this.value = value; - } + attrs.set(new DoubleWithAttributes(1.0d, Map.of("k", 1L))); - @Override - public void record(double value, Attributes attributes) { - this.value = value; - this.attributes = attributes; + otelMeter.collectMetrics(); - } - } + List metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(1)); + assertThat(metrics.get(0).attributes(), equalTo(Map.of("k", 1L))); + assertThat(metrics.get(0).getDouble(), equalTo(1.0d)); + + attrs.set(new 
DoubleWithAttributes(2.0d, Map.of("k", 5L))); + + otelMeter.getRecorder().resetCalls(); + otelMeter.collectMetrics(); - private static class TestLongMeasurement implements ObservableLongMeasurement { - long value; - Attributes attributes; + metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(1)); + assertThat(metrics.get(0).attributes(), equalTo(Map.of("k", 5L))); + assertThat(metrics.get(0).getDouble(), equalTo(2.0d)); - @Override - public void record(long value) { - this.value = value; - } + gauge.close(); - @Override - public void record(long value, Attributes attributes) { - this.value = value; - this.attributes = attributes; + otelMeter.getRecorder().resetCalls(); + otelMeter.collectMetrics(); - } + metrics = otelMeter.getRecorder().getMeasurements(gauge); + assertThat(metrics, hasSize(0)); } } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java deleted file mode 100644 index daf511fcf7042..0000000000000 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/metrics/InstrumentsTests.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.telemetry.apm.internal.metrics; - -import io.opentelemetry.api.OpenTelemetry; -import io.opentelemetry.api.metrics.Meter; - -import org.elasticsearch.test.ESTestCase; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.sameInstance; - -public class InstrumentsTests extends ESTestCase { - Meter noopMeter = OpenTelemetry.noop().getMeter("noop"); - Meter someOtherMeter = OpenTelemetry.noop().getMeter("xyz"); - String name = "name"; - String description = "desc"; - String unit = "kg"; - - public void testRegistrationAndLookup() { - Instruments instruments = new Instruments(noopMeter); - { - var registered = instruments.registerDoubleCounter(name, description, unit); - var lookedUp = instruments.getDoubleCounter(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerDoubleUpDownCounter(name, description, unit); - var lookedUp = instruments.getDoubleUpDownCounter(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerDoubleGauge(name, description, unit); - var lookedUp = instruments.getDoubleGauge(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerDoubleHistogram(name, description, unit); - var lookedUp = instruments.getDoubleHistogram(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerLongCounter(name, description, unit); - var lookedUp = instruments.getLongCounter(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerLongUpDownCounter(name, description, unit); - var lookedUp = instruments.getLongUpDownCounter(name); - assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerLongGauge(name, description, unit); - var lookedUp = instruments.getLongGauge(name); - 
assertThat(registered, sameInstance(lookedUp)); - } - { - var registered = instruments.registerLongHistogram(name, description, unit); - var lookedUp = instruments.getLongHistogram(name); - assertThat(registered, sameInstance(lookedUp)); - } - } - - public void testNameValidation() { - Instruments instruments = new Instruments(noopMeter); - - instruments.registerLongHistogram(name, description, unit); - var e = expectThrows(IllegalStateException.class, () -> instruments.registerLongHistogram(name, description, unit)); - assertThat(e.getMessage(), equalTo("LongHistogramAdapter[name] already registered")); - } -} diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 6cef9280707b8..90c76d630f0d0 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.datastreams; import org.apache.logging.log4j.core.util.Throwables; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionRequestBuilder; @@ -91,7 +90,6 @@ import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentType; @@ -560,10 +558,10 @@ public void testResolvabilityOfDataStreamsInAPIs() throws Exception { false ); verifyResolvability(dataStreamName, indicesAdmin().prepareRefresh(dataStreamName), false); - verifyResolvability(dataStreamName, client().prepareSearch(dataStreamName), false, 1); + verifyResolvability(dataStreamName, prepareSearch(dataStreamName), false, 1); verifyResolvability( dataStreamName, - client().prepareMultiSearch().add(client().prepareSearch(dataStreamName).setQuery(matchAllQuery())), + client().prepareMultiSearch().add(prepareSearch(dataStreamName).setQuery(matchAllQuery())), false ); verifyResolvability(dataStreamName, indicesAdmin().prepareClearCache(dataStreamName), false); @@ -606,10 +604,10 @@ public void testResolvabilityOfDataStreamsInAPIs() throws Exception { String wildcardExpression = "logs*"; verifyResolvability(wildcardExpression, indicesAdmin().prepareRefresh(wildcardExpression), false); - verifyResolvability(wildcardExpression, client().prepareSearch(wildcardExpression), false, 2); + verifyResolvability(wildcardExpression, prepareSearch(wildcardExpression), false, 2); verifyResolvability( wildcardExpression, - client().prepareMultiSearch().add(client().prepareSearch(wildcardExpression).setQuery(matchAllQuery())), + client().prepareMultiSearch().add(prepareSearch(wildcardExpression).setQuery(matchAllQuery())), false ); verifyResolvability(wildcardExpression, indicesAdmin().prepareClearCache(wildcardExpression), false); @@ -754,9 +752,9 @@ public void testDataSteamAliasWithFilter() throws Exception { ); // Searching the data stream directly should return all hits: - assertSearchHits(client().prepareSearch("logs-foobar"), "1", "2"); + assertSearchHits(prepareSearch("logs-foobar"), "1", "2"); // Search the alias should only return document 2, because it matches with the defined 
filter in the alias: - assertSearchHits(client().prepareSearch("foo"), "2"); + assertSearchHits(prepareSearch("foo"), "2"); // Update alias: addAction = new AliasActions(AliasActions.Type.ADD).index(dataStreamName) @@ -784,9 +782,9 @@ public void testDataSteamAliasWithFilter() throws Exception { ); // Searching the data stream directly should return all hits: - assertSearchHits(client().prepareSearch("logs-foobar"), "1", "2"); + assertSearchHits(prepareSearch("logs-foobar"), "1", "2"); // Search the alias should only return document 1, because it matches with the defined filter in the alias: - assertSearchHits(client().prepareSearch("foo"), "1"); + assertSearchHits(prepareSearch("foo"), "1"); } public void testSearchFilteredAndUnfilteredAlias() throws Exception { @@ -829,9 +827,9 @@ public void testSearchFilteredAndUnfilteredAlias() throws Exception { ); // Searching the filtered and unfiltered aliases should return all results (unfiltered): - assertSearchHits(client().prepareSearch("foo", "bar"), "1", "2"); + assertSearchHits(prepareSearch("foo", "bar"), "1", "2"); // Searching the data stream name and the filtered alias should return all results (unfiltered): - assertSearchHits(client().prepareSearch("foo", dataStreamName), "1", "2"); + assertSearchHits(prepareSearch("foo", dataStreamName), "1", "2"); } public void testRandomDataSteamAliasesUpdate() throws Exception { @@ -1297,8 +1295,7 @@ public void testSearchAllResolvesDataStreams() throws Exception { indexDocs("metrics-foo", numDocsRolledFoo); SearchRequest searchRequest = new SearchRequest("*"); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, is((long) numDocsBar + numDocsFoo + numDocsRolledFoo)); + assertHitCount(client().search(searchRequest), numDocsBar + numDocsFoo + numDocsRolledFoo); } public void testGetDataStream() throws Exception { @@ -1473,9 +1470,7 @@ public void testQueryDataStreamNameInIndexField() throws Exception { SearchRequest searchRequest = new SearchRequest("*"); searchRequest.source().query(new TermQueryBuilder("_index", "metrics-foo")); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertHitCount(client().search(searchRequest), 1); } public void testDataStreamMetadata() throws Exception { @@ -1850,7 +1845,7 @@ public void onFailure(Exception e) { client().execute( ModifyDataStreamsAction.INSTANCE, new ModifyDataStreamsAction.Request(List.of(DataStreamAction.removeBackingIndex(dataStreamName, ghostReference.getName()))) - ).actionGet() + ) ); ClusterState after = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); assertThat(after.getMetadata().dataStreams().get(dataStreamName).getIndices(), hasSize(1)); @@ -2161,11 +2156,11 @@ public void testWriteLoadAndAvgShardSizeIsStoredInABestEffort() throws Exception for (String nodeId : failingIndicesStatsNodeIds) { String nodeName = clusterStateBeforeRollover.nodes().resolveNode(nodeId).getName(); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, nodeName); - transportService.addRequestHandlingBehavior( - IndicesStatsAction.NAME + "[n]", - (handler, request, channel, task) -> channel.sendResponse(new RuntimeException("Unable to get stats")) - ); + 
MockTransportService.getInstance(nodeName) + .addRequestHandlingBehavior( + IndicesStatsAction.NAME + "[n]", + (handler, request, channel, task) -> channel.sendResponse(new RuntimeException("Unable to get stats")) + ); } logger.info( @@ -2227,14 +2222,11 @@ public void testNoShardSizeIsForecastedWhenAllShardStatRequestsFail() throws Exc .currentNodeId(); final String nodeName = clusterStateBeforeRollover.nodes().resolveNode(assignedShardNodeId).getName(); - final MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeName - ); - transportService.addRequestHandlingBehavior( - IndicesStatsAction.NAME + "[n]", - (handler, request, channel, task) -> channel.sendResponse(new RuntimeException("Unable to get stats")) - ); + MockTransportService.getInstance(nodeName) + .addRequestHandlingBehavior( + IndicesStatsAction.NAME + "[n]", + (handler, request, channel, task) -> channel.sendResponse(new RuntimeException("Unable to get stats")) + ); assertAcked(indicesAdmin().rolloverIndex(new RolloverRequest(dataStreamName, null)).actionGet()); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 715d2a7a4de2f..ceac7423b0b72 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -149,11 +149,7 @@ public void testSnapshotAndRestore() throws Exception { assertEquals(Collections.singletonList(dsBackingIndexName), getSnapshot(REPO, SNAPSHOT).indices()); - assertTrue( - client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "ds" })) - .get() - .isAcknowledged() - ); + assertAcked(client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "ds" }))); RestoreSnapshotResponse restoreSnapshotResponse = client.admin() .cluster() @@ -788,11 +784,7 @@ public void testDataStreamNotRestoredWhenIndexRequested() throws Exception { RestStatus status = createSnapshotResponse.getSnapshotInfo().status(); assertEquals(RestStatus.OK, status); - assertTrue( - client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "ds" })) - .get() - .isAcknowledged() - ); + assertAcked(client.execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "ds" }))); RestoreSnapshotResponse restoreSnapshotResponse = client.admin() .cluster() @@ -818,12 +810,7 @@ public void testDataStreamNotIncludedInLimitedSnapshot() throws ExecutionExcepti .get(); assertThat(createSnapshotResponse.getSnapshotInfo().state(), Matchers.is(SnapshotState.SUCCESS)); - assertThat( - client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "*" })) - .get() - .isAcknowledged(), - is(true) - ); + assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "*" }))); final RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot(REPO, snapshotName).get(); assertThat(restoreSnapshotResponse.getRestoreInfo().indices(), empty()); @@ -911,9 +898,7 @@ public void testCloneSnapshotThatIncludesDataStream() throws Exception { .setIncludeGlobalState(false) .execute() ); - 
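
The recurring refactor in these integration tests replaces the lookup-and-cast boilerplate with MockTransportService.getInstance(nodeName), which appears to be a static convenience over the same internalCluster() lookup. Side by side, inside an ESIntegTestCase subclass (a sketch, assuming the helper wraps the old expression):

```java
// Old call-site pattern, repeated across these tests:
MockTransportService transportService = (MockTransportService) internalCluster()
    .getInstance(TransportService.class, nodeName);

// New pattern used in the hunks above: one static call, no cast.
MockTransportService.getInstance(nodeName)
    .addRequestHandlingBehavior(
        IndicesStatsAction.NAME + "[n]",
        (handler, request, channel, task) -> channel.sendResponse(new RuntimeException("Unable to get stats"))
    );
```
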
assertAcked( - clusterAdmin().prepareCloneSnapshot(REPO, sourceSnapshotName, "target-snapshot-1").setIndices(indexWithoutDataStream).get() - ); + assertAcked(clusterAdmin().prepareCloneSnapshot(REPO, sourceSnapshotName, "target-snapshot-1").setIndices(indexWithoutDataStream)); } public void testPartialRestoreSnapshotThatIncludesDataStream() { @@ -1001,9 +986,7 @@ public void testSnapshotDSDuringRolloverAndDeleteOldIndex() throws Exception { snapshotInfo.dataStreams(), not(hasItems("ds")) ); - assertAcked( - client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "other-ds" })).get() - ); + assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { "other-ds" }))); RestoreInfo restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot(repoName, snapshotName) .setWaitForCompletion(true) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 4e70f709a4263..e3128fd1b904b 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainIndexDataStreamLifecycle; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.DataStreamAction; @@ -53,7 +52,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; import org.junit.After; @@ -75,6 +73,7 @@ import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.TARGET_MERGE_FACTOR_VALUE; import static org.elasticsearch.index.IndexSettings.LIFECYCLE_ORIGINATION_DATE; import static org.elasticsearch.indices.ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -288,16 +287,13 @@ public void testAutomaticForceMerge() throws Exception { for (DiscoveryNode node : internalCluster().getInstance(ClusterService.class, internalCluster().getMasterName()) .state() .getNodes()) { - final MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - node.getName() - ); - transportService.addRequestHandlingBehavior(ForceMergeAction.NAME + "[n]", (handler, request, channel, task) -> { - String index = ((IndicesRequest) request).indices()[0]; - forceMergedIndices.add(index); - logger.info("Force merging {}", index); - handler.messageReceived(request, channel, task); - }); + MockTransportService.getInstance(node.getName()) + .addRequestHandlingBehavior(ForceMergeAction.NAME + "[n]", (handler, 
request, channel, task) -> { + String index = ((IndicesRequest) request).indices()[0]; + forceMergedIndices.add(index); + logger.info("Force merging {}", index); + handler.messageReceived(request, channel, task); + }); } CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); @@ -742,8 +738,6 @@ static void updateLifecycle(String dataStreamName, TimeValue dataRetention) { new String[] { dataStreamName }, dataRetention ); - AcknowledgedResponse putDataLifecycleResponse = client().execute(PutDataStreamLifecycleAction.INSTANCE, putDataLifecycleRequest) - .actionGet(); - assertThat(putDataLifecycleResponse.isAcknowledged(), equalTo(true)); + assertAcked(client().execute(PutDataStreamLifecycleAction.INSTANCE, putDataLifecycleRequest)); } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java index 9656ec2744bed..f447e5b80f8c8 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/DataStreamUpgradeRestIT.java @@ -65,7 +65,7 @@ public void testCompatibleMappingUpgrade() throws Exception { { "index_patterns": [ "logs-mysql-*" ], "priority": 200, - "composed_of": [ "logs-mappings", "logs-settings" ], + "composed_of": [ "logs@mappings", "logs@settings" ], "data_stream": {}, "template": { "mappings": { @@ -103,7 +103,7 @@ public void testCompatibleMappingUpgrade() throws Exception { { "index_patterns": [ "logs-mysql-*" ], "priority": 200, - "composed_of": [ "logs-mappings", "logs-settings" ], + "composed_of": [ "logs@mappings", "logs@settings" ], "data_stream": {}, "template": { "mappings": { @@ -168,7 +168,7 @@ public void testConflictingMappingUpgrade() throws Exception { { "index_patterns": [ "logs-mysql-*" ], "priority": 200, - "composed_of": [ "logs-mappings", "logs-settings" ], + "composed_of": [ "logs@mappings", "logs@settings" ], "data_stream": {}, "template": { "mappings": { @@ -205,7 +205,7 @@ public void testConflictingMappingUpgrade() throws Exception { { "index_patterns": [ "logs-mysql-*" ], "priority": 200, - "composed_of": [ "logs-mappings", "logs-settings" ], + "composed_of": [ "logs@mappings", "logs@settings" ], "data_stream": {}, "template": { "mappings": { @@ -285,7 +285,7 @@ static void verifyTotalHitCount(String index, String requestBody, int expectedTo private void waitForLogsComponentTemplateInitialization() throws Exception { assertBusy(() -> { try { - Request logsComponentTemplateRequest = new Request("GET", "/_component_template/logs-*"); + Request logsComponentTemplateRequest = new Request("GET", "/_component_template/logs@*"); Response response = client().performRequest(logsComponentTemplateRequest); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); @@ -297,7 +297,7 @@ private void waitForLogsComponentTemplateInitialization() throws Exception { List componentTemplates = (List) responseBody.get("component_templates"); assertThat(componentTemplates.size(), equalTo(2)); Set names = componentTemplates.stream().map(m -> ((Map) m).get("name")).collect(Collectors.toSet()); - assertThat(names, containsInAnyOrder("logs-mappings", "logs-settings")); + assertThat(names, containsInAnyOrder("logs@mappings", "logs@settings")); } catch (ResponseException responseException) { // Retry in case of a 404, maybe they 
haven't been initialized yet. if (responseException.getResponse().getStatusLine().getStatusCode() == 404) { diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java index 7de4ed2f2843c..3802d572e04dd 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/EcsLogsDataStreamIT.java @@ -47,7 +47,7 @@ public void setup() throws Exception { "processors": [ { "pipeline" : { - "name": "logs@json-message", + "name": "logs@json-pipeline", "description": "A pipeline that automatically parses JSON log events into top-level fields if they are such" } } diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index cc8695b9e0e5b..b150c71c86122 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -294,7 +294,7 @@ public void testLogsMessagePipeline() throws Exception { "processors": [ { "pipeline" : { - "name": "logs@json-message", + "name": "logs@json-pipeline", "description": "A pipeline that automatically parses JSON log events into top-level fields if they are such" } } @@ -452,7 +452,7 @@ public void testNoSubobjects() throws Exception { "priority": 200, "data_stream": {}, "index_patterns": ["logs-*-*"], - "composed_of": ["logs-test-subobjects-mappings", "ecs@dynamic_templates"] + "composed_of": ["logs-test-subobjects-mappings", "ecs@mappings"] } """); assertOK(client.performRequest(request)); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index de128c685ae98..a845b75450366 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -18,13 +18,9 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -60,20 +56,11 @@ import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction; import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction; import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettingProvider; -import 
org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; import java.time.Clock; @@ -158,38 +145,27 @@ public List> getSettings() { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { Collection components = new ArrayList<>(); - var updateTimeSeriesRangeService = new UpdateTimeSeriesRangeService(environment.settings(), threadPool, clusterService); + var updateTimeSeriesRangeService = new UpdateTimeSeriesRangeService( + services.environment().settings(), + services.threadPool(), + services.clusterService() + ); this.updateTimeSeriesRangeService.set(updateTimeSeriesRangeService); components.add(this.updateTimeSeriesRangeService.get()); errorStoreInitialisationService.set(new DataStreamLifecycleErrorStore()); dataLifecycleInitialisationService.set( new DataStreamLifecycleService( settings, - new OriginSettingClient(client, DATA_STREAM_LIFECYCLE_ORIGIN), - clusterService, + new OriginSettingClient(services.client(), DATA_STREAM_LIFECYCLE_ORIGIN), + services.clusterService(), getClock(), - threadPool, - threadPool::absoluteTimeInMillis, + services.threadPool(), + services.threadPool()::absoluteTimeInMillis, errorStoreInitialisationService.get(), - allocationService + services.allocationService() ) ); dataLifecycleInitialisationService.get().init(); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index d1ea1b589b5a5..5d2d95a3dc954 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -90,6 +90,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.STARTED; +import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.UNKNOWN; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_DOWNSAMPLE_STATUS; import static org.elasticsearch.datastreams.DataStreamsPlugin.LIFECYCLE_CUSTOM_INDEX_METADATA_KEY; @@ -764,14 +765,30 @@ private Set maybeExecuteRetention(ClusterState state, DataStream dataStre for (Index index : backingIndicesOlderThanRetention) { if (indicesToExcludeForRemainingRun.contains(index) == false) { - 
indicesToBeRemoved.add(index); IndexMetadata backingIndex = metadata.index(index); assert backingIndex != null : "the data stream backing indices must exist"; - // there's an opportunity here to batch the delete requests (i.e. delete 100 indices / request) - // let's start simple and reevaluate - String indexName = backingIndex.getIndex().getName(); - deleteIndexOnce(indexName, "the lapsed [" + retention + "] retention period"); + IndexMetadata.DownsampleTaskStatus downsampleStatus = INDEX_DOWNSAMPLE_STATUS.get(backingIndex.getSettings()); + // we don't want to delete the source index if it has an in-progress downsampling operation because the + // target downsample index will remain in the system as a standalone index + if (downsampleStatus.equals(UNKNOWN)) { + indicesToBeRemoved.add(index); + + // there's an opportunity here to batch the delete requests (i.e. delete 100 indices / request) + // let's start simple and reevaluate + String indexName = backingIndex.getIndex().getName(); + deleteIndexOnce(indexName, "the lapsed [" + retention + "] retention period"); + } else { + // there's an opportunity here to cancel downsampling and delete the source index now + logger.trace( + "Data stream lifecycle skips deleting index [{}] even though its retention period [{}] has lapsed " + + "because there's a downsampling operation currently in progress for this index. Current downsampling " + + "status is [{}]. When downsampling completes, DSL will delete this index.", + index.getName(), + retention, + downsampleStatus + ); + } } } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java index b66f734dfac5e..da0caff9e591d 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/DataStreamsStatsTests.java @@ -39,6 +39,7 @@ import java.util.concurrent.TimeUnit; import static java.lang.Math.max; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; public class DataStreamsStatsTests extends ESSingleNodeTestCase { @@ -235,15 +236,13 @@ private String createDataStream(boolean hidden) throws Exception { new ComposableIndexTemplate.DataStreamTemplate(hidden, false), null ); - assertTrue( + assertAcked( client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request(dataStreamName + "_template").indexTemplate(template) - ).actionGet().isAcknowledged() - ); - assertTrue( - client().execute(CreateDataStreamAction.INSTANCE, new CreateDataStreamAction.Request(dataStreamName)).get().isAcknowledged() + ) ); + assertAcked(client().execute(CreateDataStreamAction.INSTANCE, new CreateDataStreamAction.Request(dataStreamName))); createdDataStreams.add(dataStreamName); return dataStreamName; } @@ -281,17 +280,13 @@ private DataStreamsStatsAction.Response getDataStreamsStats(boolean includeHidde return client().execute(DataStreamsStatsAction.INSTANCE, request).get(); } - private void deleteDataStream(String dataStreamName) throws InterruptedException, java.util.concurrent.ExecutionException { - assertTrue( - client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { dataStreamName })) - .get() - .isAcknowledged() - ); - assertTrue( + private void deleteDataStream(String dataStreamName) { + 
assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, new DeleteDataStreamAction.Request(new String[] { dataStreamName }))); + assertAcked( client().execute( DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request(dataStreamName + "_template") - ).actionGet().isAcknowledged() + ) ); } } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index c0cb1e5452c3d..0391f91a35fb3 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -491,7 +492,7 @@ private static ClusterState createClusterState(String dataStreamName, int number .put("index.mode", "time_series") .put("index.routing_path", "uid"); if (includeVersion) { - settings.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.V_8_9_0); + settings.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_8_9_0); } builder.put(IndexMetadata.builder(backingIndex.getName()).settings(settings).numberOfShards(1).numberOfReplicas(0)); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index f1e74a936e781..fd9664dd94493 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -98,6 +98,7 @@ import static org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock.WRITE; import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.STARTED; import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.SUCCESS; +import static org.elasticsearch.cluster.metadata.IndexMetadata.DownsampleTaskStatus.UNKNOWN; import static org.elasticsearch.datastreams.DataStreamsPlugin.LIFECYCLE_CUSTOM_INDEX_METADATA_KEY; import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleFixtures.createDataStream; import static org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService.DATA_STREAM_MERGE_POLICY_TARGET_FACTOR_SETTING; @@ -293,6 +294,84 @@ public void testRetentionNotExecutedForTSIndicesWithinTimeBounds() { assertThat(((DeleteIndexRequest) deleteIndexRequest).indices(), is(new String[] { dataStream.getIndices().get(0).getName() })); } + public void testRetentionSkippedWhilstDownsamplingInProgress() { + String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + int numBackingIndices = 3; + Metadata.Builder builder = Metadata.builder(); + DataStream dataStream = createDataStream( + builder, + dataStreamName, + numBackingIndices, + settings(IndexVersion.current()), + DataStreamLifecycle.newBuilder().dataRetention(TimeValue.timeValueMillis(0)).build(), + now + ); + 
builder.put(dataStream); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT).metadata(builder).build(); + + { + Metadata metadata = state.metadata(); + Metadata.Builder metaBuilder = Metadata.builder(metadata); + + String firstBackingIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + IndexMetadata indexMetadata = metadata.index(firstBackingIndex); + IndexMetadata.Builder indexMetaBuilder = IndexMetadata.builder(indexMetadata); + indexMetaBuilder.settings( + Settings.builder() + .put(indexMetadata.getSettings()) + .put( + IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY, + randomValueOtherThan(UNKNOWN, () -> randomFrom(IndexMetadata.DownsampleTaskStatus.values())) + ) + ); + indexMetaBuilder.putCustom( + LIFECYCLE_CUSTOM_INDEX_METADATA_KEY, + Map.of(FORCE_MERGE_COMPLETED_TIMESTAMP_METADATA_KEY, String.valueOf(System.currentTimeMillis())) + ); + metaBuilder.put(indexMetaBuilder); + state = ClusterState.builder(ClusterName.DEFAULT).metadata(metaBuilder).build(); + + dataStreamLifecycleService.run(state); + assertThat(clientSeenRequests.size(), is(2)); // rollover the write index and delete the second generation + assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); + assertThat(clientSeenRequests.get(1), instanceOf(DeleteIndexRequest.class)); + assertThat( + ((DeleteIndexRequest) clientSeenRequests.get(1)).indices()[0], + is(DataStream.getDefaultBackingIndexName(dataStreamName, 2)) + ); + } + + { + // a lack of downsample status (i.e. the default `UNKNOWN`) must not prevent retention + Metadata metadata = state.metadata(); + Metadata.Builder metaBuilder = Metadata.builder(metadata); + + String firstBackingIndex = DataStream.getDefaultBackingIndexName(dataStreamName, 1); + IndexMetadata indexMetadata = metadata.index(firstBackingIndex); + IndexMetadata.Builder indexMetaBuilder = IndexMetadata.builder(indexMetadata); + indexMetaBuilder.settings( + Settings.builder().put(indexMetadata.getSettings()).putNull(IndexMetadata.INDEX_DOWNSAMPLE_STATUS_KEY) + ); + metaBuilder.put(indexMetaBuilder); + state = ClusterState.builder(ClusterName.DEFAULT).metadata(metaBuilder).build(); + + dataStreamLifecycleService.run(state); + assertThat(clientSeenRequests.size(), is(3)); // rollover the write index and delete the other two generations + assertThat(clientSeenRequests.get(0), instanceOf(RolloverRequest.class)); + assertThat(clientSeenRequests.get(1), instanceOf(DeleteIndexRequest.class)); + assertThat( + ((DeleteIndexRequest) clientSeenRequests.get(1)).indices()[0], + is(DataStream.getDefaultBackingIndexName(dataStreamName, 2)) + ); + assertThat(clientSeenRequests.get(2), instanceOf(DeleteIndexRequest.class)); + assertThat( + ((DeleteIndexRequest) clientSeenRequests.get(2)).indices()[0], + is(DataStream.getDefaultBackingIndexName(dataStreamName, 1)) + ); + } + } + public void testIlmManagedIndicesAreSkipped() { String dataStreamName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); int numBackingIndices = 3; diff --git a/modules/health-shards-availability/build.gradle b/modules/health-shards-availability/build.gradle new file mode 100644 index 0000000000000..6c7cf5a19c8ac --- /dev/null +++ b/modules/health-shards-availability/build.gradle @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +apply plugin: 'elasticsearch.internal-yaml-rest-test' +apply plugin: 'elasticsearch.yaml-rest-compat-test' +apply plugin: 'elasticsearch.internal-cluster-test' + +esplugin { + description 'Health report API extension providing the shards_availability output' + classname 'org.elasticsearch.health.plugin.ShardsAvailabilityPlugin' +} + +restResources { + restApi { + include '_common', 'indices', 'index', 'cluster', 'nodes', 'get', 'ingest' + } +} + +tasks.named("yamlRestTestV7CompatTransform").configure {task -> + task.addAllowedWarningRegex("setting \\[ecs\\] is deprecated as ECS format is the default and only option") +} diff --git a/modules/health-shards-availability/src/main/java/org/elasticsearch/health/plugin/ShardsAvailabilityPlugin.java b/modules/health-shards-availability/src/main/java/org/elasticsearch/health/plugin/ShardsAvailabilityPlugin.java new file mode 100644 index 0000000000000..aeb45424ebc58 --- /dev/null +++ b/modules/health-shards-availability/src/main/java/org/elasticsearch/health/plugin/ShardsAvailabilityPlugin.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health.plugin; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.cluster.routing.allocation.ShardsAvailabilityHealthIndicatorService; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.plugins.HealthPlugin; +import org.elasticsearch.plugins.Plugin; + +import java.util.Collection; +import java.util.Set; + +public class ShardsAvailabilityPlugin extends Plugin implements HealthPlugin { + + private final SetOnce shardHealthService = new SetOnce<>(); + + public ShardsAvailabilityPlugin() {} + + @Override + public Collection createComponents(PluginServices services) { + this.shardHealthService.set( + new ShardsAvailabilityHealthIndicatorService(services.clusterService(), services.allocationService(), services.systemIndices()) + ); + return Set.of(this.shardHealthService.get()); + } + + @Override + public Collection getHealthIndicatorServices() { + return Set.of(this.shardHealthService.get()); + } +} diff --git a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 13fd3bdbcb531..c8d0e0f96f85b 100644 --- a/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/modules/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -107,6 +107,15 @@ static String parse(final byte content[], final Metadata metadata, final int lim } else { throw new AssertionError(cause); } + } catch (LinkageError e) { + if (e.getMessage().contains("bouncycastle")) { + /* + * Elasticsearch does not ship with bouncycastle. It is only used for public-key-encrypted PDFs, which this module does + * not support anyway. 
+ */ + throw new RuntimeException("document is encrypted", e); + } + throw new RuntimeException(e); } } diff --git a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 1fead50a600e7..20b0f19e9549e 100644 --- a/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/modules/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -243,11 +243,25 @@ public void testVisioIsExcluded() throws Exception { assertThat(attachmentData.get("content_length"), is(0L)); } - public void testEncryptedPdf() throws Exception { + public void testEncryptedWithPasswordPdf() throws Exception { + /* + * This tests that a PDF that has been encrypted with a password fails in the way expected + */ ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> parseDocument("encrypted.pdf", processor)); assertThat(e.getDetailedMessage(), containsString("document is encrypted")); } + public void testEncryptedWithKeyPdf() throws Exception { + /* + * This tests that a PDF that has been encrypted with a public key fails in the way expected + */ + ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () -> parseDocument("encrypted-with-key.pdf", processor) + ); + assertThat(e.getDetailedMessage(), containsString("document is encrypted")); + } + public void testHtmlDocument() throws Exception { Map attachmentData = parseDocument("htmlWithEmptyDateMeta.html", processor); diff --git a/modules/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/encrypted-with-key.pdf b/modules/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/encrypted-with-key.pdf new file mode 100644 index 0000000000000..6bf5f6a0ba287 Binary files /dev/null and b/modules/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/encrypted-with-key.pdf differ diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 12b5d4630373f..9a739132e5808 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -216,6 +216,7 @@ public void testInvalidTimestamp() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/92888") public void testUpdatedTimestamp() throws Exception { assumeTrue("only test with fixture to have stable results", getEndpoint() != null); testGeoIpDatabasesDownload(); @@ -227,6 +228,7 @@ public void testUpdatedTimestamp() throws Exception { testGeoIpDatabasesDownload(); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/92888") public void testGeoIpDatabasesDownload() throws Exception { putGeoIpPipeline(); updateClusterSettings(Settings.builder().put(GeoIpDownloaderTaskExecutor.ENABLED_SETTING.getKey(), true)); @@ -251,8 +253,7 @@ public void testGeoIpDatabasesDownload() throws Exception { BoolQueryBuilder queryBuilder = new BoolQueryBuilder().filter(new MatchQueryBuilder("name", id)) .filter(new 
RangeQueryBuilder("chunk").from(metadata.firstChunk()).to(metadata.lastChunk(), true)); int size = metadata.lastChunk() - metadata.firstChunk() + 1; - SearchResponse res = client().prepareSearch(GeoIpDownloader.DATABASES_INDEX) - .setSize(size) + SearchResponse res = prepareSearch(GeoIpDownloader.DATABASES_INDEX).setSize(size) .setQuery(queryBuilder) .addSort("chunk", SortOrder.ASC) .get(); diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java index afc6fa8a1c92a..26ddbaa7ba854 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/IngestGeoIpPlugin.java @@ -16,7 +16,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; @@ -25,9 +24,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.ingest.Processor; @@ -43,14 +39,10 @@ import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -107,30 +99,20 @@ public Map getProcessors(Processor.Parameters paramet } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { try { - String nodeId = nodeEnvironment.nodeId(); - databaseRegistry.get().initialize(nodeId, resourceWatcherService, ingestService.get()); + String nodeId = services.nodeEnvironment().nodeId(); + databaseRegistry.get().initialize(nodeId, services.resourceWatcherService(), ingestService.get()); } catch (IOException e) { throw new UncheckedIOException(e); } - 
geoIpDownloaderTaskExecutor = new GeoIpDownloaderTaskExecutor(client, new HttpClient(), clusterService, threadPool); + geoIpDownloaderTaskExecutor = new GeoIpDownloaderTaskExecutor( + services.client(), + new HttpClient(), + services.clusterService(), + services.threadPool() + ); geoIpDownloaderTaskExecutor.init(); return List.of(databaseRegistry.get(), geoIpDownloaderTaskExecutor); } diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 69b0046e5e207..f71a55f4f6be0 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -44,7 +44,6 @@ import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -67,7 +66,7 @@ private SearchRequestBuilder buildRequest(String script, Object... params) { paramsMap.put(params[i].toString(), params[i + 1]); } - SearchRequestBuilder req = client().prepareSearch().setIndices("test"); + SearchRequestBuilder req = prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC).unmappedType("long")) .addScriptField("foo", new Script(ScriptType.INLINE, "expression", script, paramsMap)); @@ -88,7 +87,7 @@ public void testFunction() throws Exception { ensureGreen("test"); client().prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); SearchResponse rsp = buildRequest("doc['foo'] + abs(1)").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); } @@ -114,18 +113,18 @@ public void testScore() throws Exception { ScriptScoreFunctionBuilder score = ScoreFunctionBuilders.scriptFunction( new Script(ScriptType.INLINE, "expression", "1 / _score", Collections.emptyMap()) ); - SearchRequestBuilder req = client().prepareSearch().setIndices("test"); + SearchRequestBuilder req = prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); req.setSearchType(SearchType.DFS_QUERY_THEN_FETCH); // make sure DF is consistent SearchResponse rsp = req.get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); SearchHits hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); - req = client().prepareSearch().setIndices("test"); + req = prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.functionScoreQuery(QueryBuilders.termQuery("text", "hello"), score).boostMode(CombineFunction.REPLACE)); score = ScoreFunctionBuilders.scriptFunction(new Script(ScriptType.INLINE, "expression", "1 
/ _score", Collections.emptyMap())); req.addAggregation(AggregationBuilders.max("max_score").script((score).getScript())); @@ -223,7 +222,7 @@ public void testMultiValueMethods() throws Exception { ); SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); SearchHits hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -231,7 +230,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].sum()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -239,7 +238,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].avg() + doc['double1'].avg()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -247,7 +246,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].median()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -255,7 +254,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].min()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -263,7 +262,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].max()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -271,7 +270,7 @@ public void testMultiValueMethods() throws Exception { assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); rsp = buildRequest("doc['double0'].sum()/doc['double0'].count()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -280,7 +279,7 @@ public void testMultiValueMethods() throws Exception { // make sure count() works for missing rsp = buildRequest("doc['double2'].count()").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -289,7 +288,7 @@ public void testMultiValueMethods() throws Exception { // make sure .empty works in the same way rsp = buildRequest("doc['double2'].empty ? 
5.0 : 2.0").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); hits = rsp.getHits(); assertEquals(3, hits.getTotalHits().value); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -327,7 +326,7 @@ public void testSparseField() throws Exception { client().prepareIndex("test").setId("2").setSource("id", 2, "y", 2) ); SearchResponse rsp = buildRequest("doc['x'] + 1").get(); - ElasticsearchAssertions.assertSearchResponse(rsp); + assertNoFailures(rsp); SearchHits hits = rsp.getHits(); assertEquals(2, rsp.getHits().getTotalHits().value); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); @@ -467,7 +466,7 @@ public void testSpecialValueVariable() throws Exception { client().prepareIndex("test").setId("3").setSource("x", 13, "y", 1.8) ); - SearchRequestBuilder req = client().prepareSearch().setIndices("test"); + SearchRequestBuilder req = prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) .addAggregation( AggregationBuilders.stats("int_agg") @@ -513,7 +512,7 @@ public void testStringSpecialValueVariable() throws Exception { client().prepareIndex("test").setId("3").setSource("text", "hello") ); - SearchRequestBuilder req = client().prepareSearch().setIndices("test"); + SearchRequestBuilder req = prepareSearch().setIndices("test"); req.setQuery(QueryBuilders.matchAllQuery()) .addAggregation( AggregationBuilders.terms("term_agg") @@ -566,30 +565,22 @@ public void testPipelineAggregationScript() throws Exception { client().prepareIndex("agg_index").setId("4").setSource("one", 4.0, "two", 2.0, "three", 3.0, "four", 4.0), client().prepareIndex("agg_index").setId("5").setSource("one", 5.0, "two", 2.0, "three", 3.0, "four", 4.0) ); - SearchResponse response = client().prepareSearch("agg_index") - .addAggregation( - histogram("histogram").field("one") - .interval(2) - .subAggregation(sum("twoSum").field("two")) - .subAggregation(sum("threeSum").field("three")) - .subAggregation(sum("fourSum").field("four")) - .subAggregation( - bucketScript( - "totalSum", - new Script( - ScriptType.INLINE, - ExpressionScriptEngine.NAME, - "_value0 + _value1 + _value2", - Collections.emptyMap() - ), - "twoSum", - "threeSum", - "fourSum" - ) + SearchResponse response = prepareSearch("agg_index").addAggregation( + histogram("histogram").field("one") + .interval(2) + .subAggregation(sum("twoSum").field("two")) + .subAggregation(sum("threeSum").field("three")) + .subAggregation(sum("fourSum").field("four")) + .subAggregation( + bucketScript( + "totalSum", + new Script(ScriptType.INLINE, ExpressionScriptEngine.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "twoSum", + "threeSum", + "fourSum" ) - ) - .execute() - .actionGet(); + ) + ).execute().actionGet(); Histogram histogram = response.getAggregations().get("histogram"); assertThat(histogram, notNullValue()); @@ -640,22 +631,22 @@ public void testGeo() throws Exception { refresh(); // access .lat SearchResponse rsp = buildRequest("doc['location'].lat").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); // access .lon rsp = buildRequest("doc['location'].lon").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); // access .empty rsp = buildRequest("doc['location'].empty ? 
1 : 0").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); // call haversin rsp = buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); } @@ -678,14 +669,14 @@ public void testBoolean() throws Exception { ); // access .value SearchResponse rsp = buildRequest("doc['vip'].value").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(3, rsp.getHits().getTotalHits().value); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); // access .empty rsp = buildRequest("doc['vip'].empty ? 1 : 0").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(3, rsp.getHits().getTotalHits().value); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); @@ -693,7 +684,7 @@ public void testBoolean() throws Exception { // ternary operator // vip's have a 50% discount rsp = buildRequest("doc['vip'] ? doc['price']/2 : doc['price']").get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(3, rsp.getHits().getTotalHits().value); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); @@ -712,7 +703,7 @@ public void testFilterScript() throws Exception { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); SearchResponse rsp = builder.get(); - assertSearchResponse(rsp); + assertNoFailures(rsp); assertEquals(1, rsp.getHits().getTotalHits().value); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); } diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/StoredExpressionIT.java index 6fde54668cf51..dcf380d338c14 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/StoredExpressionIT.java @@ -50,24 +50,19 @@ public void testAllOpsDisabledIndexedScripts() throws IOException { assertThat(e.getCause().getMessage(), containsString("Failed to compile stored script [script1] using lang [expression]")); } try { - client().prepareSearch() - .setSource( - new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) - ) - .setIndices("test") - .get(); + prepareSearch().setSource( + new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) + ).setIndices("test").get(); fail("search script should have been rejected"); } catch (Exception e) { assertThat(e.toString(), containsString("cannot execute scripts using [field] context")); } try { - client().prepareSearch("test") - .setSource( - 
new SearchSourceBuilder().aggregation( - AggregationBuilders.terms("test").script(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) - ) + prepareSearch("test").setSource( + new SearchSourceBuilder().aggregation( + AggregationBuilders.terms("test").script(new Script(ScriptType.STORED, null, "script1", Collections.emptyMap())) ) - .get(); + ).get(); } catch (Exception e) { assertThat(e.toString(), containsString("cannot execute scripts using [aggs] context")); } diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index b2a8de93a2c32..000728209456f 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.FailBeforeCurrentVersionQueryBuilder; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.json.JsonXContent; import java.util.Arrays; @@ -175,10 +176,9 @@ public void testBasic() throws Exception { assertThat(response4.getFailure().getMessage(), equalTo("no such index [unknown]")); MultiSearchTemplateResponse.Item response5 = response.getResponses()[4]; - assertThat(response5.isFailure(), is(false)); - SearchTemplateResponse searchTemplateResponse5 = response5.getResponse(); - assertThat(searchTemplateResponse5.hasResponse(), is(false)); - assertThat(searchTemplateResponse5.getSource().utf8ToString(), equalTo("{\"query\":{\"terms\":{\"group\":[1,2,3,]}}}")); + assertThat(response5.isFailure(), is(true)); + assertNull(response5.getResponse()); + assertThat(response5.getFailure(), instanceOf(XContentParseException.class)); } /** diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 18365abc820d0..517828cbeba3c 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.DummyQueryParserPlugin; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.junit.Before; @@ -189,6 +190,57 @@ public void testIndexedTemplateClient() throws Exception { assertNull(getResponse.getSource()); } + public void testBadTemplate() { + + // This template will produce badly formed json if given a multi-valued `text_fields` parameter, + // as it does not add commas between the entries. We test that it produces a 400 json parsing + // error both when used directly and when used in a render template request. 
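Before the template itself, here is a standalone sketch of the failure mode that comment describes, and of why the TransportSearchTemplateAction change further below now surfaces the error even for simulate/render requests (the rendered source is parsed before isSimulate is checked). The loop hand-rolls the mustache section expansion; it is not the real engine, but it concatenates section bodies the same way, with no separator between entries:

import java.util.List;
import java.util.Map;

// Sketch only: expands [{{#text_fields}}"{{name}}^{{boost}}"{{/text_fields}}]
// by hand to show why a multi-valued parameter yields malformed JSON.
public class MustacheSectionSketch {
    public static void main(String[] args) {
        List<Map<String, Object>> textFields = List.of(
            Map.of("name", "title", "boost", 10),
            Map.of("name", "description", "boost", 2)
        );
        StringBuilder fields = new StringBuilder("[");
        for (Map<String, Object> f : textFields) {
            // Section bodies are emitted back to back; nothing inserts a comma.
            fields.append('"').append(f.get("name")).append('^').append(f.get("boost")).append('"');
        }
        fields.append(']');
        // Prints ["title^10""description^2"] -- the missing comma is what makes
        // JSON parsing fail with the XContentParseException the tests expect.
        System.out.println(fields);
    }
}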
+ + String script = """ + { + "query": { + "multi_match": { + "query": "{{query_string}}", + "fields": [{{#text_fields}}"{{name}}^{{boost}}"{{/text_fields}}] + } + }, + "from": "{{from}}", + "size": "{{size}}" + }"""; + + Map params = Map.of( + "text_fields", + List.of(Map.of("name", "title", "boost", 10), Map.of("name", "description", "boost", 2)), + "from", + 0, + "size", + 0 + ); + + { + XContentParseException e = expectThrows(XContentParseException.class, () -> { + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest()) + .setScript(script) + .setScriptParams(params) + .setScriptType(ScriptType.INLINE) + .get(); + }); + assertThat(e.getMessage(), containsString("Unexpected character")); + } + + { + XContentParseException e = expectThrows(XContentParseException.class, () -> { + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest()) + .setScript(script) + .setScriptParams(params) + .setScriptType(ScriptType.INLINE) + .setSimulate(true) + .get(); + }); + assertThat(e.getMessage(), containsString("Unexpected character")); + } + } + public void testIndexedTemplate() throws Exception { String script = """ diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index ef19579c87625..2b315f48dcce4 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -104,20 +104,22 @@ static SearchRequest convert( response.setSource(new BytesArray(source)); SearchRequest searchRequest = searchTemplateRequest.getRequest(); - if (searchTemplateRequest.isSimulate()) { - return null; - } + + SearchSourceBuilder builder = SearchSourceBuilder.searchSource(); XContentParserConfiguration parserConfig = XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, source)) { - SearchSourceBuilder builder = SearchSourceBuilder.searchSource(); builder.parseXContent(parser, false, searchUsageHolder); - builder.explain(searchTemplateRequest.isExplain()); - builder.profile(searchTemplateRequest.isProfile()); - checkRestTotalHitsAsInt(searchRequest, builder); - searchRequest.source(builder); } + + if (searchTemplateRequest.isSimulate()) { + return null; + } + builder.explain(searchTemplateRequest.isExplain()); + builder.profile(searchTemplateRequest.isProfile()); + checkRestTotalHitsAsInt(searchRequest, builder); + searchRequest.source(builder); return searchRequest; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 52eccbe0dce90..62302331b38d8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -11,20 +11,13 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; 
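The PainlessPlugin diff starting here is part of a wider refactor visible in the removed imports: the long createComponents(Client, ClusterService, ThreadPool, ...) signature collapses into a single createComponents(PluginServices services) entry point. A hedged sketch of that parameter-object pattern; the record fields are illustrative, not the actual PluginServices contents:

import java.util.Collection;
import java.util.List;

// Sketch only: the parameter-object refactor in miniature.
record PluginServicesSketch(Object client, Object clusterService, Object threadPool) {}

class PluginSketch {
    private final Object scriptEngine = new Object();

    // Adding a new service no longer changes this signature; only the
    // carrier record grows, so every plugin override keeps compiling.
    Collection<Object> createComponents(PluginServicesSketch services) {
        return List.of(scriptEngine);
    }
}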
-import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.painless.action.PainlessContextAction; import org.elasticsearch.painless.action.PainlessExecuteAction; import org.elasticsearch.painless.spi.PainlessExtension; @@ -36,17 +29,11 @@ import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.ArrayList; import java.util.Arrays; @@ -135,22 +122,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { // this is a hack to bind the painless script engine in guice (all components are added to guice), so that // the painless context api. 
this is a temporary measure until transport actions do no require guice return Collections.singletonList(painlessScriptEngine.get()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java index 8ec90c7d04979..7393dff40fa11 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/action/PainlessExecuteAction.java @@ -53,6 +53,7 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; @@ -776,7 +777,13 @@ private static Response prepareRamIndex( try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(defaultAnalyzer))) { BytesReference document = request.contextSetup.document; XContentType xContentType = request.contextSetup.xContentType; - SourceToParse sourceToParse = new SourceToParse("_id", document, xContentType); + String id; + if (indexService.getIndexSettings().getMode() == IndexMode.TIME_SERIES) { + id = null; // The id gets auto generated for time series indices. + } else { + id = "_id"; + } + SourceToParse sourceToParse = new SourceToParse(id, document, xContentType); DocumentMapper documentMapper = indexService.mapperService().documentMapper(); if (documentMapper == null) { documentMapper = DocumentMapper.createEmpty(indexService.mapperService()); diff --git a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java index ba441bad0815f..a7fe63eb34ce6 100644 --- a/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java +++ b/modules/legacy-geo/src/internalClusterTest/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.legacygeo.search; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.geometry.Circle; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.legacygeo.test.TestLegacyGeoShapeFieldMapperPlugin; @@ -24,7 +23,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; public class LegacyGeoShapeIT extends GeoShapeIntegTestCase { @@ -57,7 +56,6 @@ public void testLegacyCircle() throws Exception { assertAcked( prepareCreate("test").setSettings(settings(randomSupportedVersion()).build()) .setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash") - .get() ); ensureGreen(); @@ -74,7 +72,6 @@ public void testLegacyCircle() throws Exception { })); // test self crossing of circles - SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))), 1L); 
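One behavioural detail in the PainlessExecuteAction hunk above deserves a note: time-series (TSDB) indices generate document ids themselves, so the execute API must pass a null id instead of the hard-coded "_id" when the index mode is TIME_SERIES. A tiny sketch of that branch; IndexModeSketch stands in for org.elasticsearch.index.IndexMode:

enum IndexModeSketch { STANDARD, TIME_SERIES }

final class SourceIdSketch {
    // null tells the indexing path to auto-generate the id, which is what
    // time-series indices require; everything else keeps a fixed "_id".
    static String idFor(IndexModeSketch mode) {
        return mode == IndexModeSketch.TIME_SERIES ? null : "_id";
    }

    public static void main(String[] args) {
        System.out.println(idFor(IndexModeSketch.TIME_SERIES)); // null
        System.out.println(idFor(IndexModeSketch.STANDARD));    // _id
    }
}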
} } diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java index 51cc7541a9a4d..3ae6e29802962 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; @@ -243,7 +244,7 @@ public Builder(String name, IndexVersion version, boolean ignoreMalformedByDefau }); // Set up serialization - if (version.onOrAfter(IndexVersion.V_7_0_0)) { + if (version.onOrAfter(IndexVersions.V_7_0_0)) { this.strategy.alwaysSerialize(); } // serialize treeLevels if treeLevels is configured, OR if defaults are requested and precision is not configured diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index 98effff65d8ed..5ef5eb6c0b5b8 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; import org.elasticsearch.legacygeo.parsers.ShapeParser; @@ -383,7 +384,7 @@ public void testParse3DPolygon() throws IOException, ParseException { LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( MapperBuilderContext.root(false, false) ); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java index 5037e0daff13e..6e8a61277cccf 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoWKTShapeParserTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.geometry.MultiLine; import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.legacygeo.builders.CoordinatesBuilder; import org.elasticsearch.legacygeo.builders.EnvelopeBuilder; @@ -323,7 +324,7 @@ 
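The legacy-geo hunks from here on all make the same substitution: named constants such as V_7_0_0 and V_8_0_0 move off the IndexVersion value type onto a dedicated IndexVersions holder. A hedged sketch of that split; the numeric ids are invented, only the shape of the refactor is taken from the diff:

// Value type keeps the comparison logic...
record IndexVersionSketch(int id) {
    boolean onOrAfter(IndexVersionSketch other) {
        return id >= other.id;
    }
}

// ...while the constants live in a separate holder class, so growing the
// version list never touches the value type itself.
final class IndexVersionsSketch {
    static final IndexVersionSketch V_7_0_0 = new IndexVersionSketch(7_00_00_99);
    static final IndexVersionSketch V_8_0_0 = new IndexVersionSketch(8_00_00_99);

    private IndexVersionsSketch() {}
}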
public void testParseMixedDimensionPolyWithHoleStoredZ() throws IOException { XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( MapperBuilderContext.root(false, false) ); @@ -347,7 +348,7 @@ public void testParsePolyWithStoredZ() throws IOException { XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); final LegacyGeoShapeFieldMapper mapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).build( MapperBuilderContext.root(false, false) ); @@ -363,7 +364,7 @@ public void testParseOpenPolygon() throws IOException { XContentParser parser = createParser(xContentBuilder); parser.nextToken(); - final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + final IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); final LegacyGeoShapeFieldMapper defaultMapperBuilder = new LegacyGeoShapeFieldMapper.Builder("test", version, false, true).coerce( false ).build(MapperBuilderContext.root(false, false)); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java index e93eaafd13f58..91a94fe174c21 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldMapperTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; @@ -115,7 +116,7 @@ protected boolean supportsMeta() { @Override protected IndexVersion getVersion() { - return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); } public void testLegacySwitches() throws IOException { diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java index 1e2cc84fd4520..dc74b9cd295ce 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/mapper/LegacyGeoShapeFieldTypeTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.FieldTypeTestCase; import 
org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -35,7 +36,7 @@ public void testSetStrategyName() { } public void testFetchSourceValue() throws IOException { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MappedFieldType mapper = new LegacyGeoShapeFieldMapper.Builder("field", version, false, true).build( MapperBuilderContext.root(false, false) ).fieldType(); diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java index 56856ec571274..6ef1f4c8a99b6 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.legacygeo.search; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -32,6 +31,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoIntersectionQuery; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -101,9 +101,7 @@ public void testPointsOnlyExplicit() throws Exception { .get(); // test that point was inserted - SearchResponse response = client().prepareSearch("geo_points_only").setQuery(matchAllQuery()).get(); - - assertEquals(2, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch("geo_points_only").setQuery(matchAllQuery()).get(), 2L); } public void testPointsOnly() throws Exception { @@ -139,10 +137,7 @@ public void testPointsOnly() throws Exception { } // test that point was inserted - SearchResponse response = client().prepareSearch("geo_points_only") - .setQuery(geoIntersectionQuery(defaultFieldName, geometry)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch("geo_points_only").setQuery(geoIntersectionQuery(defaultFieldName, geometry)), 1L); } public void testFieldAlias() throws IOException { @@ -172,8 +167,7 @@ public void testFieldAlias() throws IOException { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", multiPoint)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", multiPoint)), 1L); } /** diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java index 3a6d7f5a52e8c..9e5ca7a3cdc05 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java +++ 
b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java @@ -66,11 +66,9 @@ public void testHighlightingWithMatchOnlyTextFieldMatchPhrase() throws IOExcepti BulkResponse bulkItemResponses = bulk.get(); assertNoFailures(bulkItemResponses); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard")) - .setSize(500) - .highlighter(new HighlightBuilder().field("message")) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + QueryBuilders.matchPhraseQuery("message", "marking and sending shard") + ).setSize(500).highlighter(new HighlightBuilder().field("message")).get(); assertNoFailures(searchResponse); for (SearchHit searchHit : searchResponse.getHits()) { assertThat( @@ -114,11 +112,9 @@ public void testHighlightingWithMatchOnlyTextFieldSyntheticSource() throws IOExc BulkResponse bulkItemResponses = bulk.get(); assertNoFailures(bulkItemResponses); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchPhraseQuery("message", "marking and sending shard")) - .setSize(500) - .highlighter(new HighlightBuilder().field("message")) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + QueryBuilders.matchPhraseQuery("message", "marking and sending shard") + ).setSize(500).highlighter(new HighlightBuilder().field("message")).get(); assertNoFailures(searchResponse); for (SearchHit searchHit : searchResponse.getHits()) { assertThat( diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index badc2dd568f57..87699f285063f 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -39,30 +39,24 @@ protected Collection> nodePlugins() { public void testRankFeaturesTermQuery() throws IOException { init(); - SearchResponse response = client().prepareSearch(INDEX_NAME) - .setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)) - .get(); + SearchResponse response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)).get(); assertThat(response.getHits().getTotalHits().value, equalTo(2L)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(20f)); } - response = client().prepareSearch(INDEX_NAME) - .setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)) - .get(); + response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)).get(); assertThat(response.getHits().getTotalHits().value, equalTo(2L)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } - response = client().prepareSearch(INDEX_NAME) - .setQuery( - QueryBuilders.boolQuery() - .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)) - .should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f)) - .minimumShouldMatch(1) - ) - .get(); + response = prepareSearch(INDEX_NAME).setQuery( + QueryBuilders.boolQuery() + .should(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)) + 
.should(QueryBuilders.termQuery(FIELD_NAME, LOWER_RANKED_FEATURE).boost(3f)) + .minimumShouldMatch(1) + ).get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); for (SearchHit hit : response.getHits().getHits()) { if (hit.getId().equals("all")) { @@ -76,7 +70,7 @@ public void testRankFeaturesTermQuery() throws IOException { } } - response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")).get(); + response = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")).get(); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index b2a7560985165..ed5d89ad1df8c 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -175,19 +175,19 @@ private IndexRequestBuilder prepareIndex(String id, String... texts) throws IOEx } private SearchResponse searchById(String id) { - return prepareSearch().setQuery(QueryBuilders.termQuery("_id", id)).get(); + return prepareTokenCountFieldMapperSearch().setQuery(QueryBuilders.termQuery("_id", id)).get(); } private SearchRequestBuilder searchByNumericRange(int low, int high) { - return prepareSearch().setQuery( + return prepareTokenCountFieldMapperSearch().setQuery( QueryBuilders.rangeQuery( randomFrom(Arrays.asList("foo.token_count", "foo.token_count_unstored", "foo.token_count_with_doc_values")) ).gte(low).lte(high) ); } - private SearchRequestBuilder prepareSearch() { - SearchRequestBuilder request = client().prepareSearch("test"); + private SearchRequestBuilder prepareTokenCountFieldMapperSearch() { + SearchRequestBuilder request = prepareSearch("test"); request.addStoredField("foo.token_count"); request.addStoredField("foo.token_count_without_position_increments"); if (loadCountedFields) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java index fa7c61cdd80b6..ee04346591009 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapper.java @@ -39,6 +39,9 @@ import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData; import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; +import org.elasticsearch.index.mapper.BlockStoredFieldsReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -318,6 +321,14 @@ public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, return toQuery(query, queryShardContext); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if 
(textFieldType.isSyntheticSource()) { + return BlockStoredFieldsReader.bytesRefsFromStrings(storedFieldNameForSyntheticSource()); + } + return BlockSourceReader.bytesRefs(SourceValueFetcher.toString(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { if (fieldDataContext.fielddataOperation() != FielddataOperation.SCRIPT) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java index 43951878933fa..abed23621d5e9 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapper.java @@ -30,6 +30,9 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SourceValueFetcherSortedDoubleIndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MapperBuilderContext; @@ -303,6 +306,19 @@ public Query rangeQuery( return NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues(), context, isIndexed()); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (indexMode == IndexMode.TIME_SERIES && metricType == TimeSeriesParams.MetricType.COUNTER) { + // Counters are not supported by ESQL so we load them in null + return BlockDocValuesReader.nulls(); + } + if (hasDocValues()) { + double scalingFactorInverse = 1d / scalingFactor; + return BlockDocValuesReader.doubles(name(), l -> l * scalingFactorInverse); + } + return BlockSourceReader.doubles(sourceValueFetcher(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 4c738ddcffce1..e1fbc2e149441 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.tests.analysis.CannedTokenStream; import org.apache.lucene.tests.analysis.Token; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.DocumentMapper; @@ -42,6 +43,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.function.Function; import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsString; @@ -261,4 +263,9 @@ public void testDocValuesLoadedFromSynthetic() throws IOException { protected IngestScriptSupport 
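The two blockLoader additions above follow one rule: read from doc values when they exist, otherwise fall back to stored fields (synthetic source) or to _source. For scaled_float the doc-values path decodes the stored long with a precomputed inverse of the scaling factor, the same arithmetic the test's roundDocValues below uses. A self-contained sketch of that round trip:

public class ScaledFloatRoundTrip {
    public static void main(String[] args) {
        double scalingFactor = 100.0;              // e.g. keep two decimal places
        double scalingFactorInverse = 1d / scalingFactor;

        double original = 12.3456;
        long encoded = Math.round(original * scalingFactor); // stored as 1235
        double decoded = encoded * scalingFactorInverse;     // read back as 12.35

        // Precision beyond the scaling factor is lost on the way in, which is
        // why the tests compare against the rounded value, not the raw input.
        System.out.println(original + " -> " + encoded + " -> " + decoded);
    }
}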
ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } + + @Override + protected Function loadBlockExpected() { + return v -> ((BytesRef) v).utf8ToString(); + } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java index ad0610732b758..665e9289c3c7d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/ScaledFloatFieldMapperTests.java @@ -30,17 +30,20 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; +import org.hamcrest.Matcher; import org.junit.AssumptionViolatedException; import java.io.IOException; import java.util.Collection; import java.util.List; +import java.util.function.Function; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notANumber; public class ScaledFloatFieldMapperTests extends MapperTestCase { @@ -368,13 +371,15 @@ private static class ScaledFloatSyntheticSourceSupport implements SyntheticSourc public SyntheticSourceExample example(int maxValues) { if (randomBoolean()) { Tuple v = generateValue(); - return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + return new SyntheticSourceExample(v.v1(), v.v2(), roundDocValues(v.v2()), this::mapping); } List> values = randomList(1, maxValues, this::generateValue); List in = values.stream().map(Tuple::v1).toList(); List outList = values.stream().map(Tuple::v2).sorted().toList(); Object out = outList.size() == 1 ? outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + List outBlockList = values.stream().map(v -> roundDocValues(v.v2())).sorted().toList(); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(in, out, outBlock, this::mapping); } private Tuple generateValue() { @@ -398,6 +403,11 @@ private double round(double d) { return decoded; } + private double roundDocValues(double d) { + long encoded = Math.round(d * scalingFactor); + return encoded * (1 / scalingFactor); + } + private void mapping(XContentBuilder b) throws IOException { b.field("type", "scaled_float"); b.field("scaling_factor", scalingFactor); @@ -427,6 +437,16 @@ public List invalidExample() throws IOException { } } + @Override + protected Function loadBlockExpected() { + return v -> (Number) v; + } + + @Override + protected Matcher blockItemMatcher(Object expected) { + return "NaN".equals(expected) ? 
notANumber() : equalTo(expected); + } + @Override protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index 49d3d04b3ee5c..595d845d40b3d 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -9,8 +9,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Requests; import org.elasticsearch.search.SearchHit; @@ -35,8 +33,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -45,116 +42,119 @@ public class ChildrenIT extends AbstractParentChildTestCase { public void testSimpleChildrenAgg() { - final SearchRequestBuilder searchRequest = client().prepareSearch("test") - .setQuery(matchQuery("randomized", true)) - .addAggregation(children("to_comment", "comment")); - final SearchResponse searchResponse = searchRequest.get(); long count = categoryToControl.values().stream().mapToLong(control -> control.commentIds.size()).sum(); - assertSearchResponse(searchResponse); - Children childrenAgg = searchResponse.getAggregations().get("to_comment"); - assertThat("Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", childrenAgg.getDocCount(), equalTo(count)); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(matchQuery("randomized", true)).addAggregation(children("to_comment", "comment")), + response -> { + Children childrenAgg = response.getAggregations().get("to_comment"); + assertThat("Response: " + response + "\n", childrenAgg.getDocCount(), equalTo(count)); + } + ); } public void testChildrenAggs() { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("randomized", true)) - .addAggregation( - terms("category").field("category") - .size(10000) - .subAggregation( - children("to_comment", "comment").subAggregation( - terms("commenters").field("commenter").size(10000).subAggregation(topHits("top_comments")) + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(matchQuery("randomized", true)) + .addAggregation( + terms("category").field("category") + .size(10000) + .subAggregation( + children("to_comment", "comment").subAggregation( + terms("commenters").field("commenter").size(10000).subAggregation(topHits("top_comments")) + ) ) - ) - ) - .get(); - assertSearchResponse(searchResponse); - - Terms categoryTerms = 
searchResponse.getAggregations().get("category"); - assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size())); - for (Map.Entry entry1 : categoryToControl.entrySet()) { - Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey()); - assertThat(categoryBucket.getKeyAsString(), equalTo(entry1.getKey())); - assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size())); - - Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); - assertThat(childrenBucket.getName(), equalTo("to_comment")); - assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size())); - assertThat(((InternalAggregation) childrenBucket).getProperty("_count"), equalTo((long) entry1.getValue().commentIds.size())); - - Terms commentersTerms = childrenBucket.getAggregations().get("commenters"); - assertThat(((InternalAggregation) childrenBucket).getProperty("commenters"), sameInstance(commentersTerms)); - assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size())); - for (Map.Entry> entry2 : entry1.getValue().commenterToCommentId.entrySet()) { - Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey()); - assertThat(commentBucket.getKeyAsString(), equalTo(entry2.getKey())); - assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size())); - - TopHits topHits = commentBucket.getAggregations().get("top_comments"); - for (SearchHit searchHit : topHits.getHits().getHits()) { - assertThat(entry2.getValue().contains(searchHit.getId()), is(true)); + ), + response -> { + Terms categoryTerms = response.getAggregations().get("category"); + assertThat(categoryTerms.getBuckets().size(), equalTo(categoryToControl.size())); + for (Map.Entry entry1 : categoryToControl.entrySet()) { + Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry1.getKey()); + assertThat(categoryBucket.getKeyAsString(), equalTo(entry1.getKey())); + assertThat(categoryBucket.getDocCount(), equalTo((long) entry1.getValue().articleIds.size())); + + Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); + assertThat(childrenBucket.getName(), equalTo("to_comment")); + assertThat(childrenBucket.getDocCount(), equalTo((long) entry1.getValue().commentIds.size())); + assertThat( + ((InternalAggregation) childrenBucket).getProperty("_count"), + equalTo((long) entry1.getValue().commentIds.size()) + ); + + Terms commentersTerms = childrenBucket.getAggregations().get("commenters"); + assertThat(((InternalAggregation) childrenBucket).getProperty("commenters"), sameInstance(commentersTerms)); + assertThat(commentersTerms.getBuckets().size(), equalTo(entry1.getValue().commenterToCommentId.size())); + for (Map.Entry> entry2 : entry1.getValue().commenterToCommentId.entrySet()) { + Terms.Bucket commentBucket = commentersTerms.getBucketByKey(entry2.getKey()); + assertThat(commentBucket.getKeyAsString(), equalTo(entry2.getKey())); + assertThat(commentBucket.getDocCount(), equalTo((long) entry2.getValue().size())); + + TopHits topHits = commentBucket.getAggregations().get("top_comments"); + for (SearchHit searchHit : topHits.getHits().getHits()) { + assertThat(entry2.getValue().contains(searchHit.getId()), is(true)); + } + } } } - } + ); } public void testParentWithMultipleBuckets() { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchQuery("randomized", false)) - .addAggregation( - terms("category").field("category") - 
.size(10000) - .subAggregation(children("to_comment", "comment").subAggregation(topHits("top_comments").sort("id", SortOrder.ASC))) - ) - .get(); - assertSearchResponse(searchResponse); - - Terms categoryTerms = searchResponse.getAggregations().get("category"); - assertThat(categoryTerms.getBuckets().size(), equalTo(3)); - - for (Terms.Bucket bucket : categoryTerms.getBuckets()) { - logger.info("bucket={}", bucket.getKey()); - Children childrenBucket = bucket.getAggregations().get("to_comment"); - TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", topHits.getHits().getTotalHits().value); - for (SearchHit searchHit : topHits.getHits()) { - logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); - } - } + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(matchQuery("randomized", false)) + .addAggregation( + terms("category").field("category") + .size(10000) + .subAggregation(children("to_comment", "comment").subAggregation(topHits("top_comments").sort("id", SortOrder.ASC))) + ), + response -> { + Terms categoryTerms = response.getAggregations().get("category"); + assertThat(categoryTerms.getBuckets().size(), equalTo(3)); + + for (Terms.Bucket bucket : categoryTerms.getBuckets()) { + logger.info("bucket={}", bucket.getKey()); + Children childrenBucket = bucket.getAggregations().get("to_comment"); + TopHits topHits = childrenBucket.getAggregations().get("top_comments"); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value); + for (SearchHit searchHit : topHits.getHits()) { + logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); + } + } - Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a"); - assertThat(categoryBucket.getKeyAsString(), equalTo("a")); - assertThat(categoryBucket.getDocCount(), equalTo(3L)); - - Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); - assertThat(childrenBucket.getName(), equalTo("to_comment")); - assertThat(childrenBucket.getDocCount(), equalTo(2L)); - TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); - assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); - assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); - - categoryBucket = categoryTerms.getBucketByKey("b"); - assertThat(categoryBucket.getKeyAsString(), equalTo("b")); - assertThat(categoryBucket.getDocCount(), equalTo(2L)); - - childrenBucket = categoryBucket.getAggregations().get("to_comment"); - assertThat(childrenBucket.getName(), equalTo("to_comment")); - assertThat(childrenBucket.getDocCount(), equalTo(1L)); - topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); - assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); - - categoryBucket = categoryTerms.getBucketByKey("c"); - assertThat(categoryBucket.getKeyAsString(), equalTo("c")); - assertThat(categoryBucket.getDocCount(), equalTo(2L)); - - childrenBucket = categoryBucket.getAggregations().get("to_comment"); - assertThat(childrenBucket.getName(), equalTo("to_comment")); - assertThat(childrenBucket.getDocCount(), equalTo(1L)); - topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); - assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); + Terms.Bucket categoryBucket = categoryTerms.getBucketByKey("a"); + 
assertThat(categoryBucket.getKeyAsString(), equalTo("a")); + assertThat(categoryBucket.getDocCount(), equalTo(3L)); + + Children childrenBucket = categoryBucket.getAggregations().get("to_comment"); + assertThat(childrenBucket.getName(), equalTo("to_comment")); + assertThat(childrenBucket.getDocCount(), equalTo(2L)); + TopHits topHits = childrenBucket.getAggregations().get("top_comments"); + assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); + assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); + + categoryBucket = categoryTerms.getBucketByKey("b"); + assertThat(categoryBucket.getKeyAsString(), equalTo("b")); + assertThat(categoryBucket.getDocCount(), equalTo(2L)); + + childrenBucket = categoryBucket.getAggregations().get("to_comment"); + assertThat(childrenBucket.getName(), equalTo("to_comment")); + assertThat(childrenBucket.getDocCount(), equalTo(1L)); + topHits = childrenBucket.getAggregations().get("top_comments"); + assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); + + categoryBucket = categoryTerms.getBucketByKey("c"); + assertThat(categoryBucket.getKeyAsString(), equalTo("c")); + assertThat(categoryBucket.getDocCount(), equalTo(2L)); + + childrenBucket = categoryBucket.getAggregations().get("to_comment"); + assertThat(childrenBucket.getName(), equalTo("to_comment")); + assertThat(childrenBucket.getDocCount(), equalTo(1L)); + topHits = childrenBucket.getAggregations().get("top_comments"); + assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); + } + ); } public void testWithDeletes() throws Exception { @@ -174,16 +174,16 @@ public void testWithDeletes() throws Exception { indexRandom(true, requests); for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch(indexName) - .addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))) - .get(); - - assertNoFailures(searchResponse); - Children children = searchResponse.getAggregations().get("children"); - assertThat(children.getDocCount(), equalTo(4L)); - - Sum count = children.getAggregations().get("counts"); - assertThat(count.value(), equalTo(4.)); + assertNoFailuresAndResponse( + prepareSearch(indexName).addAggregation(children("children", "child").subAggregation(sum("counts").field("count"))), + response -> { + Children children = response.getAggregations().get("children"); + assertThat(children.getDocCount(), equalTo(4L)); + + Sum count = children.getAggregations().get("counts"); + assertThat(count.value(), equalTo(4.)); + } + ); String idToUpdate = Integer.toString(2 + randomInt(3)); /* @@ -203,12 +203,11 @@ public void testWithDeletes() throws Exception { } public void testNonExistingChildType() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test").addAggregation(children("non-existing", "xyz")).get(); - assertSearchResponse(searchResponse); - - Children children = searchResponse.getAggregations().get("non-existing"); - assertThat(children.getName(), equalTo("non-existing")); - assertThat(children.getDocCount(), equalTo(0L)); + assertNoFailuresAndResponse(prepareSearch("test").addAggregation(children("non-existing", "xyz")), response -> { + Children children = response.getAggregations().get("non-existing"); + assertThat(children.getName(), equalTo("non-existing")); + assertThat(children.getDocCount(), 
equalTo(0L)); + }); } public void testPostCollection() throws Exception { @@ -255,34 +254,35 @@ public void testPostCollection() throws Exception { requests.add(createIndexRequest(indexName, childType, "16", "2", "color", "green", "size", "44")); indexRandom(true, requests); - SearchResponse response = client().prepareSearch(indexName) - .setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None)) - .addAggregation( - children("my-refinements", childType).subAggregation(terms("my-colors").field("color")) - .subAggregation(terms("my-sizes").field("size")) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - - Children childrenAgg = response.getAggregations().get("my-refinements"); - assertThat(childrenAgg.getDocCount(), equalTo(7L)); - - Terms termsAgg = childrenAgg.getAggregations().get("my-colors"); - assertThat(termsAgg.getBuckets().size(), equalTo(4)); - assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3L)); - assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2L)); - assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1L)); - assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1L)); - - termsAgg = childrenAgg.getAggregations().get("my-sizes"); - assertThat(termsAgg.getBuckets().size(), equalTo(6)); - assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2L)); - assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1L)); - assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1L)); - assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1L)); - assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1L)); - assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1L)); + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(hasChildQuery(childType, termQuery("color", "orange"), ScoreMode.None)) + .addAggregation( + children("my-refinements", childType).subAggregation(terms("my-colors").field("color")) + .subAggregation(terms("my-sizes").field("size")) + ), + response -> { + assertHitCount(response, 1L); + + Children childrenAgg = response.getAggregations().get("my-refinements"); + assertThat(childrenAgg.getDocCount(), equalTo(7L)); + + Terms termsAgg = childrenAgg.getAggregations().get("my-colors"); + assertThat(termsAgg.getBuckets().size(), equalTo(4)); + assertThat(termsAgg.getBucketByKey("black").getDocCount(), equalTo(3L)); + assertThat(termsAgg.getBucketByKey("blue").getDocCount(), equalTo(2L)); + assertThat(termsAgg.getBucketByKey("green").getDocCount(), equalTo(1L)); + assertThat(termsAgg.getBucketByKey("orange").getDocCount(), equalTo(1L)); + + termsAgg = childrenAgg.getAggregations().get("my-sizes"); + assertThat(termsAgg.getBuckets().size(), equalTo(6)); + assertThat(termsAgg.getBucketByKey("36").getDocCount(), equalTo(2L)); + assertThat(termsAgg.getBucketByKey("32").getDocCount(), equalTo(1L)); + assertThat(termsAgg.getBucketByKey("34").getDocCount(), equalTo(1L)); + assertThat(termsAgg.getBucketByKey("38").getDocCount(), equalTo(1L)); + assertThat(termsAgg.getBucketByKey("40").getDocCount(), equalTo(1L)); + assertThat(termsAgg.getBucketByKey("44").getDocCount(), equalTo(1L)); + } + ); } public void testHierarchicalChildrenAggs() { @@ -305,25 +305,28 @@ public void testHierarchicalChildrenAggs() { createIndexRequest(indexName, childType, "3", "2", "name", "brussels").setRouting("1").get(); refresh(); - SearchResponse response = client().prepareSearch(indexName) - .setQuery(matchQuery("name", "europe")) - 
.addAggregation( - children(parentType, parentType).subAggregation(children(childType, childType).subAggregation(terms("name").field("name"))) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); - - Children children = response.getAggregations().get(parentType); - assertThat(children.getName(), equalTo(parentType)); - assertThat(children.getDocCount(), equalTo(1L)); - children = children.getAggregations().get(childType); - assertThat(children.getName(), equalTo(childType)); - assertThat(children.getDocCount(), equalTo(1L)); - Terms terms = children.getAggregations().get("name"); - assertThat(terms.getBuckets().size(), equalTo(1)); - assertThat(terms.getBuckets().get(0).getKey().toString(), equalTo("brussels")); - assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L)); + assertNoFailuresAndResponse( + prepareSearch(indexName).setQuery(matchQuery("name", "europe")) + .addAggregation( + children(parentType, parentType).subAggregation( + children(childType, childType).subAggregation(terms("name").field("name")) + ) + ), + response -> { + assertHitCount(response, 1L); + + Children children = response.getAggregations().get(parentType); + assertThat(children.getName(), equalTo(parentType)); + assertThat(children.getDocCount(), equalTo(1L)); + children = children.getAggregations().get(childType); + assertThat(children.getName(), equalTo(childType)); + assertThat(children.getDocCount(), equalTo(1L)); + Terms terms = children.getAggregations().get("name"); + assertThat(terms.getBuckets().size(), equalTo(1)); + assertThat(terms.getBuckets().get(0).getKey().toString(), equalTo("brussels")); + assertThat(terms.getBuckets().get(0).getDocCount(), equalTo(1L)); + } + ); } public void testPostCollectAllLeafReaders() throws Exception { @@ -356,41 +359,42 @@ public void testPostCollectAllLeafReaders() throws Exception { requests.add(createIndexRequest("index", "childType", "8", "3", "name", "Dan", "age", 1)); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("index") - .setSize(0) - .addAggregation( - AggregationBuilders.terms("towns") - .field("town") - .subAggregation( - AggregationBuilders.terms("parent_names").field("name").subAggregation(children("child_docs", "childType")) - ) - ) - .get(); - - Terms towns = response.getAggregations().get("towns"); - assertThat(towns.getBuckets().size(), equalTo(2)); - assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago")); - assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L)); - - Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names"); - assertThat(parents.getBuckets().size(), equalTo(2)); - assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice")); - assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); - Children children = parents.getBuckets().get(0).getAggregations().get("child_docs"); - assertThat(children.getDocCount(), equalTo(1L)); - - assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill")); - assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L)); - children = parents.getBuckets().get(1).getAggregations().get("child_docs"); - assertThat(children.getDocCount(), equalTo(2L)); - - assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis")); - assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L)); - parents = towns.getBuckets().get(1).getAggregations().get("parent_names"); - assertThat(parents.getBuckets().size(), equalTo(1)); - 
assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob")); - assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); - children = parents.getBuckets().get(0).getAggregations().get("child_docs"); - assertThat(children.getDocCount(), equalTo(2L)); + assertNoFailuresAndResponse( + prepareSearch("index").setSize(0) + .addAggregation( + AggregationBuilders.terms("towns") + .field("town") + .subAggregation( + AggregationBuilders.terms("parent_names").field("name").subAggregation(children("child_docs", "childType")) + ) + ), + response -> { + Terms towns = response.getAggregations().get("towns"); + assertThat(towns.getBuckets().size(), equalTo(2)); + assertThat(towns.getBuckets().get(0).getKeyAsString(), equalTo("Chicago")); + assertThat(towns.getBuckets().get(0).getDocCount(), equalTo(2L)); + + Terms parents = towns.getBuckets().get(0).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(2)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Alice")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + Children children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(1L)); + + assertThat(parents.getBuckets().get(1).getKeyAsString(), equalTo("Bill")); + assertThat(parents.getBuckets().get(1).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(1).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + + assertThat(towns.getBuckets().get(1).getKeyAsString(), equalTo("Memphis")); + assertThat(towns.getBuckets().get(1).getDocCount(), equalTo(1L)); + parents = towns.getBuckets().get(1).getAggregations().get("parent_names"); + assertThat(parents.getBuckets().size(), equalTo(1)); + assertThat(parents.getBuckets().get(0).getKeyAsString(), equalTo("Bob")); + assertThat(parents.getBuckets().get(0).getDocCount(), equalTo(1L)); + children = parents.getBuckets().get(0).getAggregations().get("child_docs"); + assertThat(children.getDocCount(), equalTo(2L)); + } + ); } } diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ParentIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ParentIT.java index 26a8d44759513..65c162e0b78bc 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ParentIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ParentIT.java @@ -8,8 +8,6 @@ package org.elasticsearch.join.aggregations; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -27,158 +25,144 @@ import static org.elasticsearch.join.aggregations.JoinAggregationBuilders.parent; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.AggregationBuilders.topHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.equalTo; public class ParentIT extends AbstractParentChildTestCase { public void testSimpleParentAgg() { - final 
SearchRequestBuilder searchRequest = client().prepareSearch("test") - .setSize(0) - .setQuery(matchQuery("randomized", true)) - .addAggregation(parent("to_article", "comment")); - SearchResponse searchResponse = searchRequest.get(); - - assertSearchResponse(searchResponse); long articlesWithComment = articleToControl.values() .stream() .filter(parentControl -> parentControl.commentIds.isEmpty() == false) .count(); - Parent parentAgg = searchResponse.getAggregations().get("to_article"); - assertThat( - "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - parentAgg.getDocCount(), - equalTo(articlesWithComment) + assertNoFailuresAndResponse( + prepareSearch("test").setSize(0).setQuery(matchQuery("randomized", true)).addAggregation(parent("to_article", "comment")), + response -> { + Parent parentAgg = response.getAggregations().get("to_article"); + assertThat("\nResponse: " + response + "\n", parentAgg.getDocCount(), equalTo(articlesWithComment)); + } ); } public void testSimpleParentAggWithSubAgg() { - final SearchRequestBuilder searchRequest = client().prepareSearch("test") - .setSize(10000) - .setQuery(matchQuery("randomized", true)) - .addAggregation(parent("to_article", "comment").subAggregation(terms("category").field("category").size(10000))); - SearchResponse searchResponse = searchRequest.get(); - assertSearchResponse(searchResponse); - long articlesWithComment = articleToControl.values() .stream() .filter(parentControl -> parentControl.commentIds.isEmpty() == false) .count(); - - Parent parentAgg = searchResponse.getAggregations().get("to_article"); - assertThat( - "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - parentAgg.getDocCount(), - equalTo(articlesWithComment) - ); - Terms categoryTerms = parentAgg.getAggregations().get("category"); long categoriesWithComments = categoryToControl.values().stream().filter(control -> control.commentIds.isEmpty() == false).count(); - assertThat( - "Buckets: " - + categoryTerms.getBuckets() - .stream() - .map((Function<MultiBucketsAggregation.Bucket, String>) MultiBucketsAggregation.Bucket::getKeyAsString) - .collect(Collectors.toList()) - + "\nCategories: " - + categoryToControl.keySet(), - (long) categoryTerms.getBuckets().size(), - equalTo(categoriesWithComments) - ); - for (Map.Entry<String, Control> entry : categoryToControl.entrySet()) { - // no children for this category -> no entry in the child to parent-aggregation - if (entry.getValue().commentIds.isEmpty()) { - assertNull(categoryTerms.getBucketByKey(entry.getKey())); - continue; + assertNoFailuresAndResponse( + prepareSearch("test").setSize(10000) + .setQuery(matchQuery("randomized", true)) + .addAggregation(parent("to_article", "comment").subAggregation(terms("category").field("category").size(10000))), + response -> { + Parent parentAgg = response.getAggregations().get("to_article"); + assertThat("Response: " + response + "\n", parentAgg.getDocCount(), equalTo(articlesWithComment)); + Terms categoryTerms = parentAgg.getAggregations().get("category"); + assertThat( + "Buckets: " + + categoryTerms.getBuckets() + .stream() + .map((Function<MultiBucketsAggregation.Bucket, String>) MultiBucketsAggregation.Bucket::getKeyAsString) + .collect(Collectors.toList()) + + "\nCategories: " + + categoryToControl.keySet(), + (long) categoryTerms.getBuckets().size(), + equalTo(categoriesWithComments) + ); + for (Map.Entry<String, Control> entry : categoryToControl.entrySet()) { + // no children for this category -> no entry in the child to parent-aggregation + if (entry.getValue().commentIds.isEmpty()) { + assertNull(categoryTerms.getBucketByKey(entry.getKey())); +
continue; + } + + final Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry.getKey()); + assertNotNull("Failed for category " + entry.getKey(), categoryBucket); + assertThat("Failed for category " + entry.getKey(), categoryBucket.getKeyAsString(), equalTo(entry.getKey())); + + // count all articles in this category which have at least one comment + long articlesForCategory = articleToControl.values() + .stream() + // only articles with this category + .filter(parentControl -> parentControl.category.equals(entry.getKey())) + // only articles which have comments + .filter(parentControl -> parentControl.commentIds.isEmpty() == false) + .count(); + assertThat("Failed for category " + entry.getKey(), categoryBucket.getDocCount(), equalTo(articlesForCategory)); + } } - - final Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(entry.getKey()); - assertNotNull("Failed for category " + entry.getKey(), categoryBucket); - assertThat("Failed for category " + entry.getKey(), categoryBucket.getKeyAsString(), equalTo(entry.getKey())); - - // count all articles in this category which have at least one comment - long articlesForCategory = articleToControl.values() - .stream() - // only articles with this category - .filter(parentControl -> parentControl.category.equals(entry.getKey())) - // only articles which have comments - .filter(parentControl -> parentControl.commentIds.isEmpty() == false) - .count(); - assertThat("Failed for category " + entry.getKey(), categoryBucket.getDocCount(), equalTo(articlesForCategory)); - } + ); } public void testParentAggs() throws Exception { - final SearchRequestBuilder searchRequest = client().prepareSearch("test") - .setSize(10000) - .setQuery(matchQuery("randomized", true)) - .addAggregation( - terms("to_commenter").field("commenter") - .size(10000) - .subAggregation( - parent("to_article", "comment").subAggregation( - terms("to_category").field("category").size(10000).subAggregation(topHits("top_category")) + assertNoFailuresAndResponse( + prepareSearch("test").setSize(10000) + .setQuery(matchQuery("randomized", true)) + .addAggregation( + terms("to_commenter").field("commenter") + .size(10000) + .subAggregation( + parent("to_article", "comment").subAggregation( + terms("to_category").field("category").size(10000).subAggregation(topHits("top_category")) + ) ) - ) - ); - SearchResponse searchResponse = searchRequest.get(); - assertSearchResponse(searchResponse); - - final Set<String> commenters = getCommenters(); - final Map<String, Set<String>> commenterToComments = getCommenterToComments(); + ), + response -> { + final Set<String> commenters = getCommenters(); + final Map<String, Set<String>> commenterToComments = getCommenterToComments(); + + Terms categoryTerms = response.getAggregations().get("to_commenter"); + assertThat("Response: " + response + "\n", categoryTerms.getBuckets().size(), equalTo(commenters.size())); + for (Terms.Bucket commenterBucket : categoryTerms.getBuckets()) { + Set<String> comments = commenterToComments.get(commenterBucket.getKeyAsString()); + assertNotNull(comments); + assertThat( + "Failed for commenter " + commenterBucket.getKeyAsString(), + commenterBucket.getDocCount(), + equalTo((long) comments.size()) + ); + + Parent articleAgg = commenterBucket.getAggregations().get("to_article"); + assertThat(articleAgg.getName(), equalTo("to_article")); + // find all articles for the comments for the current commenter + Set<String> articles = articleToControl.values() + .stream() + .flatMap( + (Function<ParentControl, Stream<String>>) parentControl -> parentControl.commentIds.stream() + .filter(comments::contains) + ) +
.collect(Collectors.toSet()); - Terms categoryTerms = searchResponse.getAggregations().get("to_commenter"); - assertThat( - "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - categoryTerms.getBuckets().size(), - equalTo(commenters.size()) - ); - for (Terms.Bucket commenterBucket : categoryTerms.getBuckets()) { - Set<String> comments = commenterToComments.get(commenterBucket.getKeyAsString()); - assertNotNull(comments); - assertThat( - "Failed for commenter " + commenterBucket.getKeyAsString(), - commenterBucket.getDocCount(), - equalTo((long) comments.size()) - ); + assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); - Parent articleAgg = commenterBucket.getAggregations().get("to_article"); - assertThat(articleAgg.getName(), equalTo("to_article")); - // find all articles for the comments for the current commenter - Set<String> articles = articleToControl.values() - .stream() - .flatMap( - (Function<ParentControl, Stream<String>>) parentControl -> parentControl.commentIds.stream().filter(comments::contains) - ) - .collect(Collectors.toSet()); + Terms categoryAgg = articleAgg.getAggregations().get("to_category"); + assertNotNull(categoryAgg); - assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); + List<String> categories = categoryToControl.entrySet() + .stream() + .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); - Terms categoryAgg = articleAgg.getAggregations().get("to_category"); - assertNotNull(categoryAgg); + for (String category : categories) { + Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); + assertNotNull(categoryBucket); - List<String> categories = categoryToControl.entrySet() - .stream() - .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + Aggregation topCategory = categoryBucket.getAggregations().get("top_category"); + assertNotNull(topCategory); + } + } - for (String category : categories) { - Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); - assertNotNull(categoryBucket); + for (String commenter : commenters) { + Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(commenter); + assertThat(categoryBucket.getKeyAsString(), equalTo(commenter)); + assertThat(categoryBucket.getDocCount(), equalTo((long) commenterToComments.get(commenter).size())); - Aggregation topCategory = categoryBucket.getAggregations().get("top_category"); - assertNotNull(topCategory); + Parent childrenBucket = categoryBucket.getAggregations().get("to_article"); + assertThat(childrenBucket.getName(), equalTo("to_article")); + } } - } - - for (String commenter : commenters) { - Terms.Bucket categoryBucket = categoryTerms.getBucketByKey(commenter); - assertThat(categoryBucket.getKeyAsString(), equalTo(commenter)); - assertThat(categoryBucket.getDocCount(), equalTo((long) commenterToComments.get(commenter).size())); - - Parent childrenBucket = categoryBucket.getAggregations().get("to_article"); - assertThat(childrenBucket.getName(), equalTo("to_article")); - } + ); } private Set<String> getCommenters() { @@ -200,69 +184,65 @@ private Map<String, Set<String>> getCommenterToComments() { } public void testNonExistingParentType() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test").addAggregation(parent("non-existing", "xyz")).get(); - assertSearchResponse(searchResponse); - - Parent parent = searchResponse.getAggregations().get("non-existing"); -
assertThat(parent.getName(), equalTo("non-existing")); - assertThat(parent.getDocCount(), equalTo(0L)); + assertNoFailuresAndResponse(prepareSearch("test").addAggregation(parent("non-existing", "xyz")), response -> { + Parent parent = response.getAggregations().get("non-existing"); + assertThat(parent.getName(), equalTo("non-existing")); + assertThat(parent.getDocCount(), equalTo(0L)); + }); } public void testTermsParentAggTerms() throws Exception { - final SearchRequestBuilder searchRequest = client().prepareSearch("test") - .setSize(10000) - .setQuery(matchQuery("randomized", true)) - .addAggregation( - terms("to_commenter").field("commenter") - .size(10000) - .subAggregation(parent("to_article", "comment").subAggregation(terms("to_category").field("category").size(10000))) - ); - SearchResponse searchResponse = searchRequest.get(); - assertSearchResponse(searchResponse); - - final Set<String> commenters = getCommenters(); - final Map<String, Set<String>> commenterToComments = getCommenterToComments(); - - Terms commentersAgg = searchResponse.getAggregations().get("to_commenter"); - assertThat( - "Request: " + searchRequest + "\nResponse: " + searchResponse + "\n", - commentersAgg.getBuckets().size(), - equalTo(commenters.size()) - ); - for (Terms.Bucket commenterBucket : commentersAgg.getBuckets()) { - Set<String> comments = commenterToComments.get(commenterBucket.getKeyAsString()); - assertNotNull(comments); - assertThat( - "Failed for commenter " + commenterBucket.getKeyAsString(), - commenterBucket.getDocCount(), - equalTo((long) comments.size()) - ); - - Parent articleAgg = commenterBucket.getAggregations().get("to_article"); - assertThat(articleAgg.getName(), equalTo("to_article")); - // find all articles for the comments for the current commenter - Set<String> articles = articleToControl.values() - .stream() - .flatMap( - (Function<ParentControl, Stream<String>>) parentControl -> parentControl.commentIds.stream().filter(comments::contains) - ) - .collect(Collectors.toSet()); + assertNoFailuresAndResponse( + prepareSearch("test").setSize(10000) + .setQuery(matchQuery("randomized", true)) + .addAggregation( + terms("to_commenter").field("commenter") + .size(10000) + .subAggregation(parent("to_article", "comment").subAggregation(terms("to_category").field("category").size(10000))) + ), + response -> { + final Set<String> commenters = getCommenters(); + final Map<String, Set<String>> commenterToComments = getCommenterToComments(); + + Terms commentersAgg = response.getAggregations().get("to_commenter"); + assertThat("Response: " + response + "\n", commentersAgg.getBuckets().size(), equalTo(commenters.size())); + for (Terms.Bucket commenterBucket : commentersAgg.getBuckets()) { + Set<String> comments = commenterToComments.get(commenterBucket.getKeyAsString()); + assertNotNull(comments); + assertThat( + "Failed for commenter " + commenterBucket.getKeyAsString(), + commenterBucket.getDocCount(), + equalTo((long) comments.size()) + ); + + Parent articleAgg = commenterBucket.getAggregations().get("to_article"); + assertThat(articleAgg.getName(), equalTo("to_article")); + // find all articles for the comments for the current commenter + Set<String> articles = articleToControl.values() + .stream() + .flatMap( + (Function<ParentControl, Stream<String>>) parentControl -> parentControl.commentIds.stream() + .filter(comments::contains) + ) + .collect(Collectors.toSet()); - assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); + assertThat(articleAgg.getDocCount(), equalTo((long) articles.size())); - Terms categoryAgg = 
articleAgg.getAggregations().get("to_category"); - assertNotNull(categoryAgg); + Terms categoryAgg = articleAgg.getAggregations().get("to_category"); + assertNotNull(categoryAgg); - List<String> categories = categoryToControl.entrySet() - .stream() - .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) - .map(Map.Entry::getKey) - .collect(Collectors.toList()); + List<String> categories = categoryToControl.entrySet() + .stream() + .filter(entry -> entry.getValue().commenterToCommentId.containsKey(commenterBucket.getKeyAsString())) + .map(Map.Entry::getKey) + .collect(Collectors.toList()); - for (String category : categories) { - Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); - assertNotNull(categoryBucket); + for (String category : categories) { + Terms.Bucket categoryBucket = categoryAgg.getBucketByKey(category); + assertNotNull(categoryBucket); + } + } } - } + ); } } diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index baa4a4459f408..34ead2c21480b 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; @@ -64,7 +65,10 @@ import static org.elasticsearch.join.query.JoinQueryBuilders.parentId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -89,8 +93,8 @@ public void testMultiLevelChild() throws Exception { createIndexRequest("test", "grandchild", "gc1", "c1", "gc_field", "gc_value1").setRouting("p1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery( + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( boolQuery().must(matchAllQuery()) .filter( hasChildQuery( @@ -100,43 +104,48 @@ public void testMultiLevelChild() throws Exception { ScoreMode.None ) ) - ) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - - searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))) - .execute() - .actionGet(); - assertNoFailures(searchResponse); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - - searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))) - .execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1")); - - searchResponse = client().prepareSearch("test") - .setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) - .execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - - searchResponse = client().prepareSearch("test") - .setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) - .execute() - .actionGet(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("gc1")); + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + } + ); + + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); + } + ); + + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); + } + ); + + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); + } + ); + + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); + } + ); } // see #2744 @@ -148,13 +157,13 @@ public void test2744() throws IOException { createIndexRequest("test", "foo", "1", null, "foo", 1).get(); createIndexRequest("test", "test", "2", "1", "foo", 1).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testSimpleChildQuery() throws Exception { @@ -171,53 +180,60 @@ 
public void testSimpleChildQuery() throws Exception { refresh(); // TEST FETCHING _parent from child - SearchResponse searchResponse; - searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(extractValue("join_field.name", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); + assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); + assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + + }); // TEST matching on parent - searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); - assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2"))); - assertThat(extractValue("join_field.name", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("child")); - assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("p1")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); + assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); + assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2"))); + assertThat(extractValue("join_field.name", response.getHits().getAt(1).getSourceAsMap()), equalTo("child")); + assertThat(extractValue("join_field.parent", response.getHits().getAt(1).getSourceAsMap()), equalTo("p1")); + } + ); // HAS CHILD - searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")).get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - - searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")).execute().actionGet(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); - - searchResponse = 
client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")).get(); - assertHitCount(searchResponse, 2L); - assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + }); + + assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "blue")), response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); + }); + + assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "red")), response -> { + assertHitCount(response, 2L); + assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + }); // HAS PARENT - searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value2")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c4")); - - searchResponse = client().prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value1")).get(); - assertHitCount(searchResponse, 2L); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1")); - assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c2")); + assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value2")), response -> { + assertHitCount(response, 2L); + assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); + assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); + }); + + assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasParent("parent", "p_field", "p_value1")), response -> { + assertHitCount(response, 2L); + assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("c2")); + }); } // Issue #3290 @@ -247,10 +263,8 @@ public void testCachingBugWithFqueryFilter() throws Exception { for (int i = 1; i <= 10; i++) { logger.info("Round {}", i); - assertNoFailures( - client().prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max))) - ); - assertNoFailures(client().prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true)))); + assertNoFailures(prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Max)))); + assertNoFailures(prepareSearch("test").setQuery(constantScoreQuery(hasParentQuery("parent", matchAllQuery(), true)))); } } @@ -286,19 +300,21 @@ public void testHasParentFilter() throws Exception { assertThat(parentToChildren.isEmpty(), equalTo(false)); for (Map.Entry<String, Set<String>> parentToChildrenEntry : parentToChildren.entrySet()) { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false))) - .setSize(numChildDocsPerParent) - .get(); - - 
assertNoFailures(searchResponse); - Set<String> childIds = parentToChildrenEntry.getValue(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < searchResponse.getHits().getTotalHits().value; i++) { - assertThat(childIds.remove(searchResponse.getHits().getAt(i).getId()), is(true)); - assertThat(searchResponse.getHits().getAt(i).getScore(), is(1.0f)); - } - assertThat(childIds.size(), is(0)); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( + constantScoreQuery(hasParentQuery("parent", termQuery("p_field", parentToChildrenEntry.getKey()), false)) + ).setSize(numChildDocsPerParent), + response -> { + assertNoFailures(response); + Set<String> childIds = parentToChildrenEntry.getValue(); + assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); + assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); + } + assertThat(childIds.size(), is(0)); + } + ); } } @@ -322,49 +338,56 @@ public void testSimpleChildQueryWithFlush() throws Exception { refresh(); // HAS CHILD QUERY + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + } + ); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - - searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); + } + ); - searchResponse = client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)).get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + } + ); // HAS CHILD FILTER - searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + } + ); - searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); + } + ); - searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + } + ); } public void testScopedFacet() throws Exception { @@ -385,37 +408,38 @@ public void testScopedFacet() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery( + assertNoFailuresAndResponse( + prepareSearch("test").setQuery( hasChildQuery( "child", boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")), ScoreMode.None ) ) - .addAggregation( - AggregationBuilders.global("global") - .subAggregation( - AggregationBuilders.filter( - "filter", - boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")) - ).subAggregation(AggregationBuilders.terms("facet1").field("c_field")) - ) - ) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); - assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); - - Global global = searchResponse.getAggregations().get("global"); - Filter filter = global.getAggregations().get("filter"); - Terms termsFacet = filter.getAggregations().get("facet1"); - assertThat(termsFacet.getBuckets().size(), equalTo(2)); - assertThat(termsFacet.getBuckets().get(0).getKeyAsString(), equalTo("red")); - assertThat(termsFacet.getBuckets().get(0).getDocCount(), equalTo(2L)); - assertThat(termsFacet.getBuckets().get(1).getKeyAsString(), equalTo("yellow")); - assertThat(termsFacet.getBuckets().get(1).getDocCount(), 
equalTo(1L)); + .addAggregation( + AggregationBuilders.global("global") + .subAggregation( + AggregationBuilders.filter( + "filter", + boolQuery().should(termQuery("c_field", "red")).should(termQuery("c_field", "yellow")) + ).subAggregation(AggregationBuilders.terms("facet1").field("c_field")) + ) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); + + Global global = response.getAggregations().get("global"); + Filter filter = global.getAggregations().get("filter"); + Terms termsFacet = filter.getAggregations().get("facet1"); + assertThat(termsFacet.getBuckets().size(), equalTo(2)); + assertThat(termsFacet.getBuckets().get(0).getKeyAsString(), equalTo("red")); + assertThat(termsFacet.getBuckets().get(0).getDocCount(), equalTo(2L)); + assertThat(termsFacet.getBuckets().get(1).getKeyAsString(), equalTo("yellow")); + assertThat(termsFacet.getBuckets().get(1).getDocCount(), equalTo(1L)); + } + ); } public void testDeletedParent() throws Exception { @@ -431,26 +455,28 @@ public void testDeletedParent() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); + } + ); // update p1 and see what that we get updated values... 
createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1_updated").get(); indicesAdmin().prepareRefresh().get(); - searchResponse = client().prepareSearch("test") - .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); + assertNoFailuresAndResponse( + prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); + assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); + } + ); } public void testDfsSearchType() throws Exception { @@ -468,14 +494,12 @@ public void testDfsSearchType() throws Exception { refresh(); assertNoFailures( - client().prepareSearch("test") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(boolQuery().mustNot(hasChildQuery("child", boolQuery().should(queryStringQuery("c_field:*")), ScoreMode.None))) ); assertNoFailures( - client().prepareSearch("test") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(boolQuery().mustNot(hasParentQuery("parent", boolQuery().should(queryStringQuery("p_field:*")), false))) ); } @@ -490,17 +514,17 @@ public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrCh client().prepareIndex("test").setId("3").setSource("p_field", 1).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchAllQuery(), ScoreMode.None)) + ), + 1L + ); - searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false))) - .get(); - assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false))), + 1L + ); } public void testCountApiUsage() throws Exception { @@ -512,24 +536,18 @@ public void testCountApiUsage() throws Exception { createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get(); refresh(); - assertHitCount( - client().prepareSearch("test").setSize(0).setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)), - 1L - ); + assertHitCount(prepareSearch("test").setSize(0).setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)), 1L); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)), 1L); + assertHitCount(prepareSearch("test").setSize(0).setQuery(hasParentQuery("parent", 
termQuery("p_field", "1"), true)), 1L); assertHitCount( - client().prepareSearch("test") - .setSize(0) + prepareSearch("test").setSize(0) .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None))), 1L ); assertHitCount( - client().prepareSearch("test") - .setSize(0) - .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false))), + prepareSearch("test").setSize(0).setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false))), 1L ); } @@ -543,19 +561,21 @@ public void testExplainUsage() throws Exception { createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setExplain(true) - .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) - .get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); + assertResponse( + prepareSearch("test").setExplain(true).setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); + } + ); - searchResponse = client().prepareSearch("test") - .setExplain(true) - .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)) - .get(); - assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); + assertResponse( + prepareSearch("test").setExplain(true).setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)), + response -> { + assertHitCount(response, 1L); + assertThat(response.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1")); + } + ); ExplainResponse explainResponse = client().prepareExplain("test", parentId) .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)) @@ -617,91 +637,93 @@ public void testScoreForParentChildQueriesWithFunctionScore() throws Exception { ensureGreen(); indexRandom(true, createDocBuilders().toArray(new IndexRequestBuilder[0])); - SearchResponse response = client().prepareSearch("test") - .setQuery( + assertResponse( + prepareSearch("test").setQuery( hasChildQuery( "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) .boostMode(CombineFunction.REPLACE), ScoreMode.Total ) - ) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); - assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[1].getScore(), equalTo(4f)); - assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[2].getScore(), equalTo(3f)); + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); + assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[1].getScore(), equalTo(4f)); + assertThat(response.getHits().getHits()[2].getId(), equalTo("2")); + 
assertThat(response.getHits().getHits()[2].getScore(), equalTo(3f)); + } + ); - response = client().prepareSearch("test") - .setQuery( + assertResponse( + prepareSearch("test").setQuery( hasChildQuery( "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) .boostMode(CombineFunction.REPLACE), ScoreMode.Max ) - ) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f)); - assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[2].getScore(), equalTo(2f)); + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f)); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getScore(), equalTo(2f)); + } + ); - response = client().prepareSearch("test") - .setQuery( + assertResponse( + prepareSearch("test").setQuery( hasChildQuery( "child", QueryBuilders.functionScoreQuery(matchQuery("c_field2", 0), fieldValueFactorFunction("c_field1")) .boostMode(CombineFunction.REPLACE), ScoreMode.Avg ) - ) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); - assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); - assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); - assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f)); - assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); - assertThat(response.getHits().getHits()[2].getScore(), equalTo(1.5f)); + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); + assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); + assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); + assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f)); + assertThat(response.getHits().getHits()[2].getId(), equalTo("1")); + assertThat(response.getHits().getHits()[2].getScore(), equalTo(1.5f)); + } + ); - response = client().prepareSearch("test") - .setQuery( + assertResponse( + prepareSearch("test").setQuery( hasParentQuery( "parent", QueryBuilders.functionScoreQuery(matchQuery("p_field1", "p_value3"), fieldValueFactorFunction("p_field2")) .boostMode(CombineFunction.REPLACE), true ) - ) - .addSort(SortBuilders.fieldSort("c_field3")) - .addSort(SortBuilders.scoreSort()) - .get(); - - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); - assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); - assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); - assertThat(response.getHits().getHits()[1].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[2].getId(), equalTo("18")); - 
assertThat(response.getHits().getHits()[2].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[3].getId(), equalTo("19")); - assertThat(response.getHits().getHits()[3].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[4].getId(), equalTo("20")); - assertThat(response.getHits().getHits()[4].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[5].getId(), equalTo("21")); - assertThat(response.getHits().getHits()[5].getScore(), equalTo(5f)); - assertThat(response.getHits().getHits()[6].getId(), equalTo("22")); - assertThat(response.getHits().getHits()[6].getScore(), equalTo(5f)); + ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); + assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); + assertThat(response.getHits().getHits()[1].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[2].getId(), equalTo("18")); + assertThat(response.getHits().getHits()[2].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[3].getId(), equalTo("19")); + assertThat(response.getHits().getHits()[3].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[4].getId(), equalTo("20")); + assertThat(response.getHits().getHits()[4].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[5].getId(), equalTo("21")); + assertThat(response.getHits().getHits()[5].getScore(), equalTo(5f)); + assertThat(response.getHits().getHits()[6].getId(), equalTo("22")); + assertThat(response.getHits().getHits()[6].getScore(), equalTo(5f)); + } + ); } // Issue #2536 @@ -709,32 +731,26 @@ public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Excep assertAcked(prepareCreate("test").setMapping(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"))); ensureGreen(); - SearchResponse response = client().prepareSearch("test") - .setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)) - .get(); - assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)), + 0L + ); client().prepareIndex("test") .setSource(jsonBuilder().startObject().field("text", "value").endObject()) .setRefreshPolicy(RefreshPolicy.IMMEDIATE) .get(); - response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get(); - assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)), + 0L + ); - response = client().prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)).get(); - assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.Max)), 0L); - response = client().prepareSearch("test").setQuery(hasParentQuery("parent", matchQuery("text", "value"), false)).get(); - assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, 

     public void testHasChildAndHasParentFilter_withFilter() throws Exception {
@@ -748,19 +764,25 @@ public void testHasChildAndHasParentFilter_withFilter() throws Exception {
         client().prepareIndex("test").setId("3").setSource("p_field", 2).get();
         refresh();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None))
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("2"));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false))
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
+            }
+        );
     }

     public void testHasChildInnerHitsHighlighting() throws Exception {
@@ -771,22 +793,23 @@ public void testHasChildInnerHitsHighlighting() throws Exception {
         createIndexRequest("test", "child", "2", "1", "c_field", "foo bar").get();
         refresh();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
                 hasChildQuery("child", matchQuery("c_field", "foo"), ScoreMode.None).innerHit(
                     new InnerHitBuilder().setHighlightBuilder(
                         new HighlightBuilder().field(new Field("c_field").highlightQuery(QueryBuilders.matchQuery("c_field", "bar")))
                     )
                 )
-            )
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("1"));
-        SearchHit[] searchHits = searchResponse.getHits().getHits()[0].getInnerHits().get("child").getHits();
-        assertThat(searchHits.length, equalTo(1));
-        assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments().length, equalTo(1));
-        assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments()[0].string(), equalTo("foo bar"));
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
+                SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits();
+                assertThat(searchHits.length, equalTo(1));
+                assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments().length, equalTo(1));
+                assertThat(searchHits[0].getHighlightFields().get("c_field").getFragments()[0].string(), equalTo("foo bar"));
+            }
+        );
     }

     public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception {
@@ -799,27 +822,33 @@ public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception {
         createIndexRequest("test", "child", "2", "1", "c_field", 1).get();
         refresh();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))
-            .get();
-        assertSearchHit(searchResponse, 1, hasId("1"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None))
+            ),
+            response -> assertSearchHit(response, 1, hasId("1"))
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false)))
-            .get();
-        assertSearchHit(searchResponse, 1, hasId("2"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchQuery("p_field", 1), false))
+            ),
+            response -> assertSearchHit(response, 1, hasId("2"))
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(
+        assertResponse(
+            prepareSearch("test").setQuery(
                 boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", 1), ScoreMode.None)))
-            )
-            .get();
-        assertSearchHit(searchResponse, 1, hasId("1"));
+            ),
+            response -> assertSearchHit(response, 1, hasId("1"))
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false))))
-            .get();
-        assertSearchHit(searchResponse, 1, hasId("2"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchAllQuery()).filter(boolQuery().must(hasParentQuery("parent", matchQuery("p_field", 1), false)))
+            ),
+            response -> assertSearchHit(response, 1, hasId("2"))
+        );
     }

     public void testSimpleQueryRewrite() throws Exception {
@@ -852,33 +881,35 @@ public void testSimpleQueryRewrite() throws Exception {
         SearchType[] searchTypes = new SearchType[] { SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH };
         for (SearchType searchType : searchTypes) {
-            SearchResponse searchResponse = client().prepareSearch("test")
-                .setSearchType(searchType)
-                .setQuery(hasChildQuery("child", prefixQuery("c_field", "c"), ScoreMode.Max))
-                .addSort("p_field", SortOrder.ASC)
-                .setSize(5)
-                .get();
-            assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(10L));
-            assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("p000"));
-            assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("p001"));
-            assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("p002"));
-            assertThat(searchResponse.getHits().getHits()[3].getId(), equalTo("p003"));
-            assertThat(searchResponse.getHits().getHits()[4].getId(), equalTo("p004"));
-
-            searchResponse = client().prepareSearch("test")
-                .setSearchType(searchType)
-                .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p"), true))
-                .addSort("c_field", SortOrder.ASC)
-                .setSize(5)
-                .get();
-            assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(500L));
-            assertThat(searchResponse.getHits().getHits()[0].getId(), equalTo("c000"));
-            assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("c001"));
-            assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("c002"));
-            assertThat(searchResponse.getHits().getHits()[3].getId(), equalTo("c003"));
-            assertThat(searchResponse.getHits().getHits()[4].getId(), equalTo("c004"));
+            assertNoFailuresAndResponse(
+                prepareSearch("test").setSearchType(searchType)
+                    .setQuery(hasChildQuery("child", prefixQuery("c_field", "c"), ScoreMode.Max))
+                    .addSort("p_field", SortOrder.ASC)
+                    .setSize(5),
+                response -> {
+                    assertThat(response.getHits().getTotalHits().value, equalTo(10L));
+                    assertThat(response.getHits().getHits()[0].getId(), equalTo("p000"));
+                    assertThat(response.getHits().getHits()[1].getId(), equalTo("p001"));
+                    assertThat(response.getHits().getHits()[2].getId(), equalTo("p002"));
+                    assertThat(response.getHits().getHits()[3].getId(), equalTo("p003"));
+                    assertThat(response.getHits().getHits()[4].getId(), equalTo("p004"));
+                }
+            );
+
+            assertNoFailuresAndResponse(
+                prepareSearch("test").setSearchType(searchType)
+                    .setQuery(hasParentQuery("parent", prefixQuery("p_field", "p"), true))
+                    .addSort("c_field", SortOrder.ASC)
+                    .setSize(5),
+                response -> {
+                    assertThat(response.getHits().getTotalHits().value, equalTo(500L));
+                    assertThat(response.getHits().getHits()[0].getId(), equalTo("c000"));
+                    assertThat(response.getHits().getHits()[1].getId(), equalTo("c001"));
+                    assertThat(response.getHits().getHits()[2].getId(), equalTo("c002"));
+                    assertThat(response.getHits().getHits()[3].getId(), equalTo("c003"));
+                    assertThat(response.getHits().getHits()[4].getId(), equalTo("c004"));
+                }
+            );
         }
     }

@@ -897,21 +928,25 @@ public void testReIndexingParentAndChildDocuments() throws Exception {
         refresh();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
-        assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
+                assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c3"));
-        assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("c4"));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(response.getHits().getAt(0).getId(), equalTo("c3"));
+                assertThat(response.getHits().getAt(1).getId(), equalTo("c4"));
+            }
+        );

         // re-index
         for (int i = 0; i < 10; i++) {
@@ -922,21 +957,25 @@ public void testReIndexingParentAndChildDocuments() throws Exception {
             indicesAdmin().prepareRefresh("test").get();
         }

-        searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1"));
-        assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
+                assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
-        assertThat(searchResponse.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(
+                boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
+            ),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
+                assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
+            }
+        );
     }

     // Issue #3203
@@ -952,15 +991,15 @@ public void testHasChildQueryWithMinimumScore() throws Exception {
         createIndexRequest("test", "child", "c4", "p2", "c_field", "x").get();
         createIndexRequest("test", "child", "c5", "p2", "c_field", "x").get();
         refresh();
-
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total))
-            .setMinScore(3) // Score needs to be 3 or above!
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p2"));
-        assertThat(searchResponse.getHits().getAt(0).getScore(), equalTo(3.0f));
+        // Score needs to be 3 or above!
+        assertNoFailuresAndResponse(
+            prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3),
+            response -> {
+                assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(response.getHits().getAt(0).getId(), equalTo("p2"));
+                assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f));
+            }
+        );
     }
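Where the assertions need more than a bare hit count, the diff switches to the consumer-based variant seen above. The usage shape is worth noting: the SearchResponse exists only inside the lambda, so it cannot leak past the assertion block or be reused after release. A minimal usage sketch, with an illustrative query and matcher only:

    // The helper owns the request lifecycle: it executes the builder, checks for
    // shard failures, hands the response to the lambda, then releases it.
    assertNoFailuresAndResponse(
        prepareSearch("test").setQuery(matchAllQuery()),
        response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L))
    );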

     public void testParentFieldQuery() throws Exception {
@@ -971,8 +1010,9 @@ public void testParentFieldQuery() throws Exception {
         ensureGreen();

         assertHitCount(
-            client().prepareSearch("test")
-                .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))),
+            prepareSearch("test").setQuery(
+                boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
+            ),
             0L
         );

@@ -980,19 +1020,19 @@ public void testParentFieldQuery() throws Exception {
         refresh();

         assertHitCount(
-            client().prepareSearch("test")
-                .setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))),
+            prepareSearch("test").setQuery(
+                boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
+            ),
             1L
         );

         createIndexRequest("test", "child", "c2", "p2").get();
         refresh();

         assertHitCount(
-            client().prepareSearch("test")
-                .setQuery(
-                    boolQuery().should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
-                        .should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child")))
-                ),
+            prepareSearch("test").setQuery(
+                boolQuery().should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
+                    .should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child")))
+            ),
             2L
         );
     }

@@ -1007,15 +1047,12 @@ public void testParentIdQuery() throws Exception {
         createIndexRequest("test", "child", "c1", "p1").get();
         refresh();

-        assertHitCount(client().prepareSearch("test").setQuery(parentId("child", "p1")), 1L);
+        assertHitCount(prepareSearch("test").setQuery(parentId("child", "p1")), 1L);

         createIndexRequest("test", "child", "c2", "p2").get();
         refresh();

-        assertHitCount(
-            client().prepareSearch("test").setQuery(boolQuery().should(parentId("child", "p1")).should(parentId("child", "p2"))),
-            2L
-        );
+        assertHitCount(prepareSearch("test").setQuery(boolQuery().should(parentId("child", "p1")).should(parentId("child", "p2"))), 2L);
     }

     public void testHasChildNotBeingCached() throws IOException {
@@ -1037,20 +1074,18 @@ public void testHasChildNotBeingCached() throws IOException {
         indicesAdmin().prepareFlush("test").get();
         indicesAdmin().prepareRefresh("test").get();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
+        assertHitCountAndNoFailures(
+            prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))),
+            1L
+        );

         createIndexRequest("test", "child", "c2", "p2", "c_field", "blue").get();
         indicesAdmin().prepareRefresh("test").get();

-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)))
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
+        assertHitCountAndNoFailures(
+            prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))),
+            2L
+        );
     }

     private QueryBuilder randomHasChild(String type, String field, String value) {
@@ -1104,34 +1139,32 @@ public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception {
         refresh();

         assertHitCount(
-            client().prepareSearch("grandissue")
-                .setQuery(
-                    boolQuery().must(
-                        hasChildQuery(
-                            "parent",
-                            boolQuery().must(
-                                hasChildQuery("child_type_one", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
-                            ),
-                            ScoreMode.None
-                        )
+            prepareSearch("grandissue").setQuery(
+                boolQuery().must(
+                    hasChildQuery(
+                        "parent",
+                        boolQuery().must(
+                            hasChildQuery("child_type_one", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
+                        ),
+                        ScoreMode.None
                     )
-                ),
+                )
+            ),
             1L
         );

         assertHitCount(
-            client().prepareSearch("grandissue")
-                .setQuery(
-                    boolQuery().must(
-                        hasChildQuery(
-                            "parent",
-                            boolQuery().must(
-                                hasChildQuery("child_type_two", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
-                            ),
-                            ScoreMode.None
-                        )
+            prepareSearch("grandissue").setQuery(
+                boolQuery().must(
+                    hasChildQuery(
+                        "parent",
+                        boolQuery().must(
+                            hasChildQuery("child_type_two", boolQuery().must(queryStringQuery("name:William*")), ScoreMode.None)
+                        ),
+                        ScoreMode.None
                     )
-                ),
+                )
+            ),
             0L
         );
     }

@@ -1196,23 +1229,21 @@ public void testHasChildQueryWithNestedInnerObjects() throws Exception {
         refresh();

         ScoreMode scoreMode = randomFrom(ScoreMode.values());
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(
+        assertHitCountAndNoFailures(
+            prepareSearch("test").setQuery(
                 boolQuery().must(hasChildQuery("child", termQuery("c_field", "blue"), scoreMode))
                     .filter(boolQuery().mustNot(termQuery("p_field", "3")))
-            )
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
+            ),
+            1L
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(
+        assertHitCountAndNoFailures(
+            prepareSearch("test").setQuery(
                 boolQuery().must(hasChildQuery("child", termQuery("c_field", "red"), scoreMode))
                     .filter(boolQuery().mustNot(termQuery("p_field", "3")))
-            )
-            .get();
-        assertNoFailures(searchResponse);
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
+            ),
+            2L
+        );
     }

     public void testNamedFilters() throws Exception {
@@ -1224,33 +1255,45 @@ public void testNamedFilters() throws Exception {
         createIndexRequest("test", "child", "c1", parentId, "c_field", "1").get();
         refresh();

-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test"))
-            .get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+        assertResponse(
+            prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max).queryName("test")),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
+                assertThat(response.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test"))
-            .get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+        assertResponse(
+            prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true).queryName("test")),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
+                assertThat(response.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test")))
-            .get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                constantScoreQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None).queryName("test"))
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
+                assertThat(response.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+            }
+        );

-        searchResponse = client().prepareSearch("test")
-            .setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test")))
-            .get();
-        assertHitCount(searchResponse, 1L);
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
-        assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+        assertResponse(
+            prepareSearch("test").setQuery(
+                constantScoreQuery(hasParentQuery("parent", termQuery("p_field", "1"), false).queryName("test"))
+            ),
+            response -> {
+                assertHitCount(response, 1L);
+                assertThat(response.getHits().getAt(0).getMatchedQueries().length, equalTo(1));
+                assertThat(response.getHits().getAt(0).getMatchedQueries()[0], equalTo("test"));
+            }
+        );
     }

     public void testParentChildQueriesNoParentType() throws Exception {
@@ -1262,35 +1305,35 @@ public void testParentChildQueriesNoParentType() throws Exception {
         refresh();

         try {
-            client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
+            prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
             fail();
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
         }

         try {
-            client().prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)).get();
+            prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max)).get();
             fail();
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
         }

         try {
-            client().prepareSearch("test").setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
+            prepareSearch("test").setPostFilter(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.None)).get();
             fail();
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
         }

         try {
-            client().prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get();
+            prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "1"), true)).get();
             fail();
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
         }

         try {
-            client().prepareSearch("test").setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)).get();
+            prepareSearch("test").setPostFilter(hasParentQuery("parent", termQuery("p_field", "1"), false)).get();
             fail();
         } catch (SearchPhaseExecutionException e) {
             assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
@@ -1320,28 +1363,27 @@ public void testParentChildCaching() throws Exception {
         indicesAdmin().prepareRefresh("test").get();

         for (int i = 0; i < 2; i++) {
-            SearchResponse searchResponse = client().prepareSearch()
-                .setQuery(
+            assertHitCount(
+                prepareSearch().setQuery(
                     boolQuery().must(matchAllQuery())
                         .filter(
                             boolQuery().must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)).must(matchAllQuery())
                         )
-                )
-                .get();
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
+                ),
+                2L
+            );
         }

         createIndexRequest("test", "child", "c3", "p2", "c_field", "blue").get();
         indicesAdmin().prepareRefresh("test").get();

-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(
+        assertHitCount(
+            prepareSearch().setQuery(
                 boolQuery().must(matchAllQuery())
                     .filter(boolQuery().must(hasChildQuery("child", matchQuery("c_field", "red"), ScoreMode.None)).must(matchAllQuery()))
-            )
-            .get();
-
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
+            ),
+            1L
+        );
     }

     public void testParentChildQueriesViaScrollApi() throws Exception {
@@ -1361,8 +1403,7 @@ public void testParentChildQueriesViaScrollApi() throws Exception {
             boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", matchAllQuery(), false)) };

         for (QueryBuilder query : queries) {
-            SearchResponse scrollResponse = client().prepareSearch("test")
-                .setScroll(TimeValue.timeValueSeconds(30))
+            SearchResponse scrollResponse = prepareSearch("test").setScroll(TimeValue.timeValueSeconds(30))
                 .setSize(1)
                 .addStoredField("_id")
                 .setQuery(query)
@@ -1409,7 +1450,8 @@ private List createMinMaxDocBuilders() {
         return indexBuilders;
     }

-    private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren) throws SearchPhaseExecutionException {
+    private SearchRequestBuilder minMaxQuery(ScoreMode scoreMode, int minChildren, Integer maxChildren)
+        throws SearchPhaseExecutionException {
         HasChildQueryBuilder hasChildQuery = hasChildQuery(
             "child",
             QueryBuilders.functionScoreQuery(
@@ -1422,7 +1464,7 @@ private SearchResponse minMaxQuery(ScoreMode scoreMode, int minChildren, Integer
             scoreMode
         ).minMaxChildren(minChildren, maxChildren != null ? maxChildren : HasChildQueryBuilder.DEFAULT_MAX_CHILDREN);

-        return client().prepareSearch("test").setQuery(hasChildQuery).addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC).get();
+        return prepareSearch("test").setQuery(hasChildQuery).addSort("_score", SortOrder.DESC).addSort("id", SortOrder.ASC);
     }
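Changing minMaxQuery to return the unexecuted SearchRequestBuilder is what lets the call sites below hand it straight to assertResponse and assertHitCount, which own execution and cleanup. The expectThrows call sites keep working because, as the assertions below indicate, minMaxChildren validates its bounds while the query is being built, before any search runs. The resulting call-site shape, taken from the hunks that follow:

    // Call-site shape enabled by the new return type.
    assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); // helper executes and releases the response
    IllegalArgumentException e = expectThrows(
        IllegalArgumentException.class,
        () -> minMaxQuery(ScoreMode.None, 3, 2)               // throws during query construction
    );
    assertThat(e.getMessage(), equalTo("[has_child] 'max_children' is less than 'min_children'"));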

     public void testMinMaxChildren() throws Exception {
@@ -1430,268 +1472,259 @@ public void testMinMaxChildren() throws Exception {
         ensureGreen();
         indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0]));
-        SearchResponse response;

         // Score mode = NONE
-        response = minMaxQuery(ScoreMode.None, 1, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 2, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 3, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 4, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(0L));
-
-        response = minMaxQuery(ScoreMode.None, 1, 4);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 1, 3);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 1, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.None, 2, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+        assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+        });
+
+        assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L);
+
+        assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
+        });

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> minMaxQuery(ScoreMode.None, 3, 2));
         assertThat(e.getMessage(), equalTo("[has_child] 'max_children' is less than 'min_children'"));

         // Score mode = SUM
-        response = minMaxQuery(ScoreMode.Total, 1, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Total, 2, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
-
-        response = minMaxQuery(ScoreMode.Total, 3, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
-
-        response = minMaxQuery(ScoreMode.Total, 4, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(0L));
-
-        response = minMaxQuery(ScoreMode.Total, 1, 4);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Total, 1, 3);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Total, 1, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Total, 2, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+        assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
+        });
+
+        assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L);
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+        });

         e = expectThrows(IllegalArgumentException.class, () -> minMaxQuery(ScoreMode.Total, 3, 2));
         assertThat(e.getMessage(), equalTo("[has_child] 'max_children' is less than 'min_children'"));

         // Score mode = MAX
-        response = minMaxQuery(ScoreMode.Max, 1, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Max, 2, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
-
-        response = minMaxQuery(ScoreMode.Max, 3, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-
-        response = minMaxQuery(ScoreMode.Max, 4, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(0L));
-
-        response = minMaxQuery(ScoreMode.Max, 1, 4);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Max, 1, 3);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Max, 1, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Max, 2, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+        assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+        });
+
+        assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L);
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+        });

         e = expectThrows(IllegalArgumentException.class, () -> minMaxQuery(ScoreMode.Max, 3, 2));
         assertThat(e.getMessage(), equalTo("[has_child] 'max_children' is less than 'min_children'"));

         // Score mode = AVG
-        response = minMaxQuery(ScoreMode.Avg, 1, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Avg, 2, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
-
-        response = minMaxQuery(ScoreMode.Avg, 3, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-
-        response = minMaxQuery(ScoreMode.Avg, 4, null);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(0L));
-
-        response = minMaxQuery(ScoreMode.Avg, 1, 4);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Avg, 1, 3);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(3L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
-        assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Avg, 1, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(2L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
-        assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
-        assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
-
-        response = minMaxQuery(ScoreMode.Avg, 2, 2);
-
-        assertThat(response.getHits().getTotalHits().value, equalTo(1L));
-        assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
-        assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
+        assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+        });
+
+        assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L);
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1.5f));
+            assertThat(response.getHits().getHits()[2].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[2].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
+            assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
+            assertThat(response.getHits().getHits()[1].getScore(), equalTo(1f));
+        });
+
+        assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> {
+            assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+            assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
+            assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
+        });

         e = expectThrows(IllegalArgumentException.class, () -> minMaxQuery(ScoreMode.Avg, 3, 2));
         assertThat(e.getMessage(), equalTo("[has_child] 'max_children' is less than 'min_children'"));
@@ -1707,12 +1740,12 @@ public void testHasParentInnerQueryType() {
         // make sure that when we explicitly set a type, the inner query is executed in the context of the child type instead
         assertSearchHits(
-            client().prepareSearch("test").setQuery(hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)),
+            prepareSearch("test").setQuery(hasChildQuery("child-type", new IdsQueryBuilder().addIds("child-id"), ScoreMode.None)),
             "parent-id"
         );
         // make sure that when we explicitly set a type, the inner query is executed in the context of the parent type instead
         assertSearchHits(
-            client().prepareSearch("test").setQuery(hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)),
+            prepareSearch("test").setQuery(hasParentQuery("parent-type", new IdsQueryBuilder().addIds("parent-id"), false)),
             "child-id"
         );
     }
@@ -1744,29 +1777,31 @@ public void testHighlightersIgnoreParentChild() throws IOException {
         String[] highlightTypes = new String[] { "plain", "fvh", "unified" };
         for (String highlightType : highlightTypes) {
             logger.info("Testing with highlight type [{}]", highlightType);
-            SearchResponse searchResponse = client().prepareSearch("test")
-                .setQuery(
+            assertResponse(
+                prepareSearch("test").setQuery(
                     new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
                         .must(new HasChildQueryBuilder("child-type", new MatchAllQueryBuilder(), ScoreMode.None))
-                )
-                .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType)))
-                .get();
-            assertHitCount(searchResponse, 1);
-            assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("parent-id"));
-            HighlightField highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText");
-            assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown fox"));
-
-            searchResponse = client().prepareSearch("test")
-                .setQuery(
+                ).highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))),
+                response -> {
+                    assertHitCount(response, 1);
+                    assertThat(response.getHits().getAt(0).getId(), equalTo("parent-id"));
+                    HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
+                    assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown fox"));
+                }
+            );
+
+            assertResponse(
+                prepareSearch("test").setQuery(
                     new BoolQueryBuilder().must(new MatchQueryBuilder("searchText", "fox"))
                         .must(new HasParentQueryBuilder("parent-type", new MatchAllQueryBuilder(), false))
-                )
-                .highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType)))
-                .get();
-            assertHitCount(searchResponse, 1);
-            assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("child-id"));
-            highlightField = searchResponse.getHits().getAt(0).getHighlightFields().get("searchText");
-            assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown fox"));
+                ).highlighter(new HighlightBuilder().field(new HighlightBuilder.Field("searchText").highlighterType(highlightType))),
+                response -> {
+                    assertHitCount(response, 1);
+                    assertThat(response.getHits().getAt(0).getId(), equalTo("child-id"));
+                    HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("searchText");
+                    assertThat(highlightField.getFragments()[0].string(), equalTo("quick brown fox"));
+                }
+            );
         }
     }

@@ -1783,12 +1818,14 @@ public void testAliasesFilterWithHasChildQuery() throws Exception {
         );
         assertAcked(indicesAdmin().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery(), false)));

-        SearchResponse response = client().prepareSearch("filter1").get();
-        assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
-        response = client().prepareSearch("filter2").get();
-        assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
+        assertResponse(prepareSearch("filter1"), response -> {
+            assertHitCount(response, 1L);
+            assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
+        });
+        assertResponse(prepareSearch("filter2"), response -> {
+            assertHitCount(response, 1L);
+            assertThat(response.getHits().getAt(0).getId(), equalTo("2"));
+        });
     }
 }
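The next file gets the same treatment. Note the import swap from assertSearchHits to assertSearchHitsWithoutFailures, which by its name folds the failure check into the hit-ID assertion. A sketch of that combined shape (hypothetical; the real helper and its matching semantics live in ElasticsearchAssertions):

    // Sketch only: no-failures check plus hit-id matching in one call.
    public static void assertSearchHitsWithoutFailures(SearchRequestBuilder builder, String... ids) {
        SearchResponse response = builder.get();
        try {
            assertNoFailures(response);
            assertThat(
                Arrays.stream(response.getHits().getHits()).map(SearchHit::getId).collect(Collectors.toSet()),
                equalTo(Set.of(ids))
            );
        } finally {
            response.decRef();
        }
    }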
diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
index 2e647e6ea08e5..39a84f2d16d7f 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java
@@ -12,7 +12,6 @@
 import org.apache.lucene.util.ArrayUtil;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.IndexSettings;
@@ -52,9 +51,12 @@
 import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.containsString;
@@ -113,59 +115,63 @@ public void testSimpleParentChild() throws Exception {
         requests.add(createIndexRequest("articles", "comment", "c6", "p2", "message", "elephant scared by mice x y"));
         indexRandom(true, requests);

-        SearchResponse response = client().prepareSearch("articles")
-            .setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder()))
-            .get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertSearchHit(response, 1, hasId("p1"));
-        assertThat(response.getHits().getAt(0).getShard(), notNullValue());
-
-        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
-        SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-        assertThat(innerHits.getTotalHits().value, equalTo(2L));
-
-        assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
-        assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
+                hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                assertSearchHit(response, 1, hasId("p1"));
+                assertThat(response.getHits().getAt(0).getShard(), notNullValue());
+
+                assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+                SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+                assertThat(innerHits.getTotalHits().value, equalTo(2L));
+
+                assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
+                assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
+            }
+        );

         final boolean seqNoAndTerm = randomBoolean();
-        response = client().prepareSearch("articles")
-            .setQuery(
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
                 hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None).innerHit(
                     new InnerHitBuilder().setSeqNoAndPrimaryTerm(seqNoAndTerm)
                 )
-            )
-            .get();
-        assertNoFailures(response);
-        assertHitCount(response, 1);
-        assertSearchHit(response, 1, hasId("p2"));
-
-        assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
-        innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-        assertThat(innerHits.getTotalHits().value, equalTo(3L));
-
-        assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
-        assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
-        assertThat(innerHits.getAt(2).getId(), equalTo("c6"));
-
-        if (seqNoAndTerm) {
-            assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L));
-            assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(1L));
-            assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(1L));
-            assertThat(innerHits.getAt(0).getSeqNo(), greaterThanOrEqualTo(0L));
-            assertThat(innerHits.getAt(1).getSeqNo(), greaterThanOrEqualTo(0L));
-            assertThat(innerHits.getAt(2).getSeqNo(), greaterThanOrEqualTo(0L));
-        } else {
-            assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
-            assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
-            assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
-            assertThat(innerHits.getAt(0).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
-            assertThat(innerHits.getAt(1).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
-            assertThat(innerHits.getAt(2).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
-        }
+            ),
+            response -> {
+                assertHitCount(response, 1);
+                assertSearchHit(response, 1, hasId("p2"));
+
+                assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
+                SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+                assertThat(innerHits.getTotalHits().value, equalTo(3L));
+
+                assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
+                assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
+                assertThat(innerHits.getAt(2).getId(), equalTo("c6"));
+
+                if (seqNoAndTerm) {
+                    assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(1L));
+                    assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(1L));
+                    assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(1L));
+                    assertThat(innerHits.getAt(0).getSeqNo(), greaterThanOrEqualTo(0L));
+                    assertThat(innerHits.getAt(1).getSeqNo(), greaterThanOrEqualTo(0L));
+                    assertThat(innerHits.getAt(2).getSeqNo(), greaterThanOrEqualTo(0L));
+                } else {
+                    assertThat(innerHits.getAt(0).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
+                    assertThat(innerHits.getAt(1).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
+                    assertThat(innerHits.getAt(2).getPrimaryTerm(), equalTo(UNASSIGNED_PRIMARY_TERM));
+                    assertThat(innerHits.getAt(0).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
+                    assertThat(innerHits.getAt(1).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
+                    assertThat(innerHits.getAt(2).getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
+                }
+            }
+        );

-        response = client().prepareSearch("articles")
-            .setQuery(
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
                 hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
                     new InnerHitBuilder().addFetchField("message")
                         .setHighlightBuilder(new HighlightBuilder().field("message"))
@@ -173,27 +179,32 @@ public void testSimpleParentChild() throws Exception {
                         .setSize(1)
                         .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap()))
                 )
-            )
-            .get();
-        assertNoFailures(response);
-        innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-        assertThat(innerHits.getHits().length, equalTo(1));
-        assertThat(innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(), equalTo("fox eat quick"));
-        assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
-        assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("fox eat quick"));
-        assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
-
-        response = client().prepareSearch("articles")
-            .setQuery(
+            ),
+            response -> {
+                SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+                assertThat(innerHits.getHits().length, equalTo(1));
+                assertThat(
+                    innerHits.getAt(0).getHighlightFields().get("message").getFragments()[0].string(),
+                    equalTo("fox eat quick")
+                );
+                assertThat(innerHits.getAt(0).getExplanation().toString(), containsString("weight(message:fox"));
+                assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("fox eat quick"));
+                assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
+            }
+        );
+
+        assertNoFailuresAndResponse(
+            prepareSearch("articles").setQuery(
                 hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None).innerHit(
                     new InnerHitBuilder().addDocValueField("message").setSize(1)
                 )
-            )
-            .get();
-        assertNoFailures(response);
-        innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-        assertThat(innerHits.getHits().length, equalTo(1));
-        assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
+            ),
+            response -> {
+                SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
+                assertThat(innerHits.getHits().length, equalTo(1));
+                assertThat(innerHits.getAt(0).getFields().get("message").getValue().toString(), equalTo("eat"));
+            }
+        );
     }

     public void testRandomParentChild() throws Exception {
@@ -256,43 +267,39 @@ public void testRandomParentChild() throws Exception {
             )
         );
-        SearchResponse searchResponse = client().prepareSearch("idx")
-            .setSize(numDocs)
-            .addSort("id", SortOrder.ASC)
-            .setQuery(boolQuery)
-            .get();
-
-        assertNoFailures(searchResponse);
-        assertHitCount(searchResponse, numDocs);
-        assertThat(searchResponse.getHits().getHits().length, equalTo(numDocs));
-
-        int offset1 = 0;
-        int offset2 = 0;
-        for (int parent = 0; parent < numDocs; parent++) {
-            SearchHit searchHit = searchResponse.getHits().getAt(parent);
-            assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent)));
-            assertThat(searchHit.getShard(), notNullValue());
-
-            SearchHits inner = searchHit.getInnerHits().get("a");
-            assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent]));
-            for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
-                SearchHit innerHit = inner.getAt(child);
-                String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child);
-                assertThat(innerHit.getId(), equalTo(childId));
-                assertThat(innerHit.getNestedIdentity(), nullValue());
-            }
-            offset1 += child1InnerObjects[parent];
-
-            inner = searchHit.getInnerHits().get("b");
-            assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent]));
-            for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
-                SearchHit innerHit = inner.getAt(child);
-                String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child);
-                assertThat(innerHit.getId(), equalTo(childId));
-                assertThat(innerHit.getNestedIdentity(), nullValue());
+        assertNoFailuresAndResponse(prepareSearch("idx").setSize(numDocs).addSort("id", SortOrder.ASC).setQuery(boolQuery), response -> {
+            assertHitCount(response, numDocs);
+            assertThat(response.getHits().getHits().length, equalTo(numDocs));
+
+            int offset1 = 0;
+            int offset2 = 0;
+            for (int parent = 0; parent < numDocs; parent++) {
+                SearchHit searchHit = response.getHits().getAt(parent);
+                assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent)));
+                assertThat(searchHit.getShard(), notNullValue());
+
+                SearchHits inner = searchHit.getInnerHits().get("a");
+                assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent]));
+                for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
+                    SearchHit innerHit =
inner.getAt(child); + String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); + assertThat(innerHit.getId(), equalTo(childId)); + assertThat(innerHit.getNestedIdentity(), nullValue()); + } + offset1 += child1InnerObjects[parent]; + + inner = searchHit.getInnerHits().get("b"); + assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { + SearchHit innerHit = inner.getAt(child); + String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); + assertThat(innerHit.getId(), equalTo(childId)); + assertThat(innerHit.getNestedIdentity(), nullValue()); + } + offset2 += child2InnerObjects[parent]; } - offset2 += child2InnerObjects[parent]; - } + }); } public void testInnerHitsOnHasParent() throws Exception { @@ -329,25 +336,26 @@ public void testInnerHitsOnHasParent() throws Exception { ); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("stack") - .addSort("id", SortOrder.ASC) - .setQuery( - boolQuery().must(matchQuery("body", "fail2ban")) - .must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder())) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 2); - - SearchHit searchHit = response.getHits().getAt(0); - assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); - - searchHit = response.getHits().getAt(1); - assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); - assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); + assertNoFailuresAndResponse( + prepareSearch("stack").addSort("id", SortOrder.ASC) + .setQuery( + boolQuery().must(matchQuery("body", "fail2ban")) + .must(hasParentQuery("question", matchAllQuery(), false).innerHit(new InnerHitBuilder())) + ), + response -> { + assertHitCount(response, 2); + + SearchHit searchHit = response.getHits().getAt(0); + assertThat(searchHit.getId(), equalTo("3")); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); + + searchHit = response.getHits().getAt(1); + assertThat(searchHit.getId(), equalTo("4")); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); + } + ); } public void testParentChildMultipleLayers() throws Exception { @@ -372,51 +380,51 @@ public void testParentChildMultipleLayers() throws Exception { requests.add(createIndexRequest("articles", "remark", "6", "4", "message", "bad").setRouting("2")); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("articles") - .setQuery( + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( hasChildQuery( "comment", hasChildQuery("remark", matchQuery("message", "good"), ScoreMode.None).innerHit(new InnerHitBuilder()), ScoreMode.None ).innerHit(new InnerHitBuilder()) - ) - .get(); - - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("1")); - - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - SearchHits innerHits = 
response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getAt(0).getId(), equalTo("3")); - - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getAt(0).getId(), equalTo("5")); + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("1")); + + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getAt(0).getId(), equalTo("3")); + + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getAt(0).getId(), equalTo("5")); + } + ); - response = client().prepareSearch("articles") - .setQuery( + assertNoFailuresAndResponse( + prepareSearch("articles").setQuery( hasChildQuery( "comment", hasChildQuery("remark", matchQuery("message", "bad"), ScoreMode.None).innerHit(new InnerHitBuilder()), ScoreMode.None ).innerHit(new InnerHitBuilder()) - ) - .get(); - - assertNoFailures(response); - assertHitCount(response, 1); - assertSearchHit(response, 1, hasId("2")); - - assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); - innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getAt(0).getId(), equalTo("4")); - - innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getAt(0).getId(), equalTo("6")); + ), + response -> { + assertHitCount(response, 1); + assertSearchHit(response, 1, hasId("2")); + + assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getAt(0).getId(), equalTo("4")); + + innerHits = innerHits.getAt(0).getInnerHits().get("remark"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getAt(0).getId(), equalTo("6")); + } + ); } public void testRoyals() throws Exception { @@ -450,9 +458,8 @@ public void testRoyals() throws Exception { requests.add(createIndexRequest("royals", "baron", "baron3", "earl3").setRouting("king")); requests.add(createIndexRequest("royals", "baron", "baron4", "earl4").setRouting("king")); indexRandom(true, requests); - - SearchResponse response = client().prepareSearch("royals") - .setQuery( + assertResponse( + prepareSearch("royals").setQuery( boolQuery().filter( hasParentQuery( "prince", @@ -469,41 +476,43 @@ public void testRoyals() throws Exception { new InnerHitBuilder().addSort(SortBuilders.fieldSort("id").order(SortOrder.ASC)).setName("earls").setSize(4) ) ) - ) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); - - SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); - assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); - assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); - assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); - assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); - - SearchHits 
innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); - - innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); - - innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); - - innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); - - innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerHits.getAt(0).getId(), equalTo("prince")); - - innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); - assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); + ), + response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); + + SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); + assertThat(innerHits.getTotalHits().value, equalTo(4L)); + assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); + assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); + assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); + assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); + + SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); + assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); + + innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); + assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); + + innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); + assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); + + innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); + assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); + + innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); + assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getAt(0).getId(), equalTo("prince")); + + innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); + assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); + } + ); } public void testMatchesQueriesParentChildInnerHits() throws Exception { @@ -516,32 +525,34 @@ public void testMatchesQueriesParentChildInnerHits() throws Exception { requests.add(createIndexRequest("index", "child", "5", "2", "field", "value1")); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("index") - .setQuery( + assertResponse( + prepareSearch("index").setQuery( hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder()) - ) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 2); - 
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); - - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); + ).addSort("id", SortOrder.ASC), + response -> { + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); + + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); + } + ); QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None).innerHit( new InnerHitBuilder() ); - response = client().prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC).get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); + assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); + }); } public void testUseMaxDocInsteadOfSize() throws Exception { @@ -560,9 +571,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception { QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None).innerHit( new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1) ); - SearchResponse response = client().prepareSearch("index1").setQuery(query).get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(prepareSearch("index1").setQuery(query), 1L); } public void 
testNestedInnerHitWrappedInParentChildInnerhit() { @@ -578,8 +587,8 @@ public void testNestedInnerHitWrappedInParentChildInnerhit() { createIndexRequest("test", "parent_type", "1", null, "key", "value").get(); createIndexRequest("test", "child_type", "2", "1", "nested_type", Collections.singletonMap("key", "value")).get(); refresh(); - SearchResponse response = client().prepareSearch("test") - .setQuery( + assertResponse( + prepareSearch("test").setQuery( boolQuery().must(matchQuery("key", "value")) .should( hasChildQuery( @@ -588,13 +597,21 @@ public void testNestedInnerHitWrappedInParentChildInnerhit() { ScoreMode.None ).innerHit(new InnerHitBuilder()) ) - ) - .get(); - assertHitCount(response, 1); - SearchHit hit = response.getHits().getAt(0); - String parentId = (String) extractValue("join_field.parent", hit.getInnerHits().get("child_type").getAt(0).getSourceAsMap()); - assertThat(parentId, equalTo("1")); - assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue()); + ), + response -> { + assertHitCount(response, 1); + SearchHit hit = response.getHits().getAt(0); + String parentId = (String) extractValue( + "join_field.parent", + hit.getInnerHits().get("child_type").getAt(0).getSourceAsMap() + ); + assertThat(parentId, equalTo("1")); + assertThat( + hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), + nullValue() + ); + } + ); } public void testInnerHitsWithIgnoreUnmapped() { @@ -612,18 +629,16 @@ public void testInnerHitsWithIgnoreUnmapped() { createIndexRequest("index1", "child_type", "2", "1").get(); client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); - - SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery( + assertSearchHitsWithoutFailures( + prepareSearch("index1", "index2").setQuery( boolQuery().should( hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true)) ).should(termQuery("key", "value")) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 2); - assertSearchHits(response, "1", "3"); + ), + "1", + "3" + ); } public void testTooHighResultWindow() { @@ -639,24 +654,20 @@ public void testTooHighResultWindow() { createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get(); createIndexRequest("index1", "child_type", "2", "1").get(); refresh(); - - SearchResponse response = client().prepareSearch("index1") - .setQuery( + assertHitCountAndNoFailures( + prepareSearch("index1").setQuery( hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) .innerHit(new InnerHitBuilder().setFrom(50).setSize(10).setName("_name")) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); + ), + 1 + ); Exception e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("index1") - .setQuery( - hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) - ) - .get() + () -> prepareSearch("index1").setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) + ).get() ); assertThat( e.getCause().getMessage(), @@ -664,12 +675,10 @@ public void testTooHighResultWindow() { ); e = 
expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("index1") - .setQuery( - hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) - ) - .get() + () -> prepareSearch("index1").setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) + ).get() ); assertThat( e.getCause().getMessage(), @@ -677,18 +686,16 @@ public void testTooHighResultWindow() { ); updateIndexSettings(Settings.builder().put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110), "index1"); assertNoFailures( - client().prepareSearch("index1") - .setQuery( - hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) - ) + prepareSearch("index1").setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(100).setSize(10).setName("_name")) + ) ); assertNoFailures( - client().prepareSearch("index1") - .setQuery( - hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) - .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) - ) + prepareSearch("index1").setQuery( + hasChildQuery("child_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) + .innerHit(new InnerHitBuilder().setFrom(10).setSize(100).setName("_name")) + ) ); } } diff --git a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 5d6d51e548eea..9362080c9cb33 100644 --- a/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -98,14 +98,13 @@ public void testPercolatorQuery() throws Exception { BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject()); logger.info("percolating empty doc"); - SearchResponse response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); logger.info("percolating doc with 1 field"); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -116,8 +115,7 @@ public void testPercolatorQuery() throws Exception { source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()); logger.info("percolating doc with 2 fields"); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); 
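/*
 * Aside: the parent-join hunks above all migrate to the response-consumer helpers in
 * ElasticsearchAssertions, so the tests no longer hold a raw SearchResponse. A minimal
 * sketch of the pattern, assuming an ESIntegTestCase subclass whose "articles" index is
 * populated as in testSimpleParentChild (the method name and expected ids here are
 * illustrative, not part of the patch):
 */
import org.apache.lucene.search.join.ScoreMode;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse;
import static org.hamcrest.Matchers.equalTo;

public void testConsumerStyleAssertions() {
    // The helper executes the search, asserts there were no shard failures, and hands
    // the response to the consumer, so the response's lifecycle stays inside the helper.
    assertNoFailuresAndResponse(
        prepareSearch("articles").setQuery(hasChildQuery("comment", matchQuery("message", "fox"), ScoreMode.None)),
        response -> assertThat(response.getHits().getAt(0).getId(), equalTo("p1"))
    );
    // Bare hit-count checks collapse to a single call.
    assertHitCountAndNoFailures(prepareSearch("articles").setQuery(matchQuery("message", "fox")), 2L);
}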
assertHitCount(response, 3); @@ -129,19 +127,16 @@ public void testPercolatorQuery() throws Exception { assertThat(response.getHits().getAt(2).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); logger.info("percolating doc with 2 fields"); - response = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", - Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) - ), - XContentType.JSON - ) + response = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject()) + ), + XContentType.JSON ) - .addSort("id", SortOrder.ASC) - .get(); + ).addSort("id", SortOrder.ASC).get(); assertHitCount(response, 3); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); @@ -238,44 +233,44 @@ public void testPercolatorRangeQueries() throws Exception { // Test long range: BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject()); - SearchResponse response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); logger.info("response={}", response); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); // Test double range: source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(1).getId(), equalTo("4")); source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("4")); // Test IP range: source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); 
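/*
 * Aside: every percolator search in this file follows the same call shape — serialize an
 * ad-hoc document and match it against the queries stored in the percolator-typed "query"
 * field. A condensed sketch (variable names are illustrative; the field, sort, and query
 * names are taken from the surrounding tests):
 */
BytesReference document = BytesReference.bytes(
    jsonBuilder().startObject().field("field1", "value").endObject()  // the document to percolate
);
SearchResponse matches = prepareSearch()
    .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON))
    .addSort("id", SortOrder.ASC)  // deterministic order, so the tests can assert on ids
    .get();
// Each hit is a stored query that matches the supplied document.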
assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("9")); assertThat(response.getHits().getAt(1).getId(), equalTo("7")); source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("7")); // Test date range: source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject()); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("10")); } @@ -325,8 +320,7 @@ public void testPercolatorGeoQueries() throws Exception { BytesReference source = BytesReference.bytes( jsonBuilder().startObject().startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject().endObject() ); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -365,15 +359,12 @@ public void testPercolatorQueryExistingDocument() throws Exception { indicesAdmin().prepareRefresh().get(); logger.info("percolating empty doc"); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)) - .get(); + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); logger.info("percolating doc with 1 field"); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)) + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 2); @@ -381,8 +372,7 @@ public void testPercolatorQueryExistingDocument() throws Exception { assertThat(response.getHits().getAt(1).getId(), equalTo("2")); logger.info("percolating doc with 2 fields"); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -404,7 +394,7 @@ public void testPercolatorQueryExistingDocumentSourceDisabled() throws Exception logger.info("percolating empty doc with source disabled"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - client().prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)).get(); + prepareSearch().setQuery(new PercolateQueryBuilder("query", "test", "1", null, null, null)).get(); }); assertThat(e.getMessage(), containsString("source disabled")); } @@ -488,8 +478,7 @@ public void testPercolatorSpecificQueries() throws Exception { .field("field2", "the quick 
brown fox falls down into the well") .endObject() ); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(response, 3); @@ -542,8 +531,7 @@ public void testPercolatorQueryWithHighlighting() throws Exception { BytesReference document = BytesReference.bytes( jsonBuilder().startObject().field("field1", "The quick brown fox jumps over the lazy dog").endObject() ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) + SearchResponse searchResponse = prepareSearch().setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) .highlighter(new HighlightBuilder().field("field1")) .addSort("id", SortOrder.ASC) .get(); @@ -574,14 +562,10 @@ public void testPercolatorQueryWithHighlighting() throws Exception { jsonBuilder().startObject().field("field1", "The quick brown fox jumps").endObject() ); BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject().field("field1", "over the lazy dog").endObject()); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) - .should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2")) - ) - .highlighter(new HighlightBuilder().field("field1")) - .addSort("id", SortOrder.ASC) - .get(); + searchResponse = prepareSearch().setQuery( + boolQuery().should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) + .should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2")) + ).highlighter(new HighlightBuilder().field("field1")).addSort("id", SortOrder.ASC).get(); logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); @@ -606,22 +590,18 @@ public void testPercolatorQueryWithHighlighting() throws Exception { equalTo("The quick brown fox jumps") ); - searchResponse = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", - Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) - ), - XContentType.JSON - ) + searchResponse = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) + ), + XContentType.JSON ) - .highlighter(new HighlightBuilder().field("field1")) - .addSort("id", SortOrder.ASC) - .get(); + ).highlighter(new HighlightBuilder().field("field1")).addSort("id", SortOrder.ASC).get(); assertHitCount(searchResponse, 5); assertThat( searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), @@ -660,32 +640,28 @@ public void 
testPercolatorQueryWithHighlighting() throws Exception { equalTo("brown fox") ); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().should( + searchResponse = prepareSearch().setQuery( + boolQuery().should( + new PercolateQueryBuilder( + "query", + Arrays.asList( + BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) + ), + XContentType.JSON + ).setName("query1") + ) + .should( new PercolateQueryBuilder( "query", Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()) + BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), + BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) ), XContentType.JSON - ).setName("query1") + ).setName("query2") ) - .should( - new PercolateQueryBuilder( - "query", - Arrays.asList( - BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()), - BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject()) - ), - XContentType.JSON - ).setName("query2") - ) - ) - .highlighter(new HighlightBuilder().field("field1")) - .addSort("id", SortOrder.ASC) - .get(); + ).highlighter(new HighlightBuilder().field("field1")).addSort("id", SortOrder.ASC).get(); logger.info("searchResponse={}", searchResponse); assertHitCount(searchResponse, 5); assertThat( @@ -764,9 +740,9 @@ public void testTakePositionOffsetGapIntoAccount() throws Exception { .get(); indicesAdmin().prepareRefresh().get(); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"), XContentType.JSON)) - .get(); + SearchResponse response = prepareSearch().setQuery( + new PercolateQueryBuilder("query", new BytesArray("{\"field\" : [\"brown\", \"fox\"]}"), XContentType.JSON) + ).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -846,16 +822,14 @@ public void testWithMultiplePercolatorFields() throws Exception { indicesAdmin().prepareRefresh().get(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject()); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) .setIndices("test1") .get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getIndex(), equalTo("test1")); - response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("object_field." + queryFieldName, source, XContentType.JSON)) + response = prepareSearch().setQuery(new PercolateQueryBuilder("object_field." 
+ queryFieldName, source, XContentType.JSON)) .setIndices("test2") .get(); assertHitCount(response, 1); @@ -942,10 +916,67 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .get(); indicesAdmin().prepareRefresh().get(); - SearchResponse response = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", + SearchResponse response = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "virginia potts") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ).addSort("id", SortOrder.ASC).get(); + assertHitCount(response, 2); + assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("q3")); + + response = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "notstark") + .startArray("employee") + .startObject() + .field("name", "virginia stark") + .endObject() + .startObject() + .field("name", "tony stark") + .endObject() + .endArray() + .endObject() + ), + XContentType.JSON + ) + ).addSort("id", SortOrder.ASC).get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); + + response = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), + XContentType.JSON + ) + ).addSort("id", SortOrder.ASC).get(); + assertHitCount(response, 1); + assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); + + response = prepareSearch().setQuery( + new PercolateQueryBuilder( + "query", + Arrays.asList( BytesReference.bytes( XContentFactory.jsonBuilder() .startObject() @@ -960,104 +991,35 @@ public void testPercolateQueryWithNestedDocuments() throws Exception { .endArray() .endObject() ), - XContentType.JSON - ) - ) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 2); - assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("q3")); - - response = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", BytesReference.bytes( XContentFactory.jsonBuilder() .startObject() - .field("companyname", "notstark") + .field("companyname", "stark") .startArray("employee") .startObject() - .field("name", "virginia stark") + .field("name", "peter parker") .endObject() .startObject() - .field("name", "tony stark") + .field("name", "virginia potts") .endObject() .endArray() .endObject() ), - XContentType.JSON - ) - ) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); - - response = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()), - XContentType.JSON - ) - ) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q3")); - - response = client().prepareSearch() - .setQuery( - new PercolateQueryBuilder( - "query", - Arrays.asList( - BytesReference.bytes( - 
XContentFactory.jsonBuilder() - .startObject() - .field("companyname", "stark") - .startArray("employee") - .startObject() - .field("name", "virginia potts") - .endObject() - .startObject() - .field("name", "tony stark") - .endObject() - .endArray() - .endObject() - ), - BytesReference.bytes( - XContentFactory.jsonBuilder() - .startObject() - .field("companyname", "stark") - .startArray("employee") - .startObject() - .field("name", "peter parker") - .endObject() - .startObject() - .field("name", "virginia potts") - .endObject() - .endArray() - .endObject() - ), - BytesReference.bytes( - XContentFactory.jsonBuilder() - .startObject() - .field("companyname", "stark") - .startArray("employee") - .startObject() - .field("name", "peter parker") - .endObject() - .endArray() - .endObject() - ) - ), - XContentType.JSON - ) + BytesReference.bytes( + XContentFactory.jsonBuilder() + .startObject() + .field("companyname", "stark") + .startArray("employee") + .startObject() + .field("name", "peter parker") + .endObject() + .endArray() + .endObject() + ) + ), + XContentType.JSON ) - .addSort("id", SortOrder.ASC) - .get(); + ).addSort("id", SortOrder.ASC).get(); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); @@ -1101,48 +1063,44 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { MultiSearchResponse response = client().prepareMultiSearch() .add( - client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), - XContentType.JSON - ) + prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), + XContentType.JSON ) + ) ) .add( - client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), - XContentType.YAML - ) + prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), + XContentType.YAML ) + ) ) .add( - client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()), - XContentType.JSON - ) + prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b c").endObject()), + XContentType.JSON ) + ) ) .add( - client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), - XContentType.JSON - ) + prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), + XContentType.JSON ) + ) ) - .add(client().prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null))) + .add(prepareSearch("test").setQuery(new PercolateQueryBuilder("query", "test", "5", null, null, null))) .add( - client().prepareSearch("test") // non existing doc, so error element + prepareSearch("test") // non existing doc, so error element .setQuery(new PercolateQueryBuilder("query", "test", "6", null, null, null)) ) .get(); @@ 
-1192,9 +1150,7 @@ public void testDisallowExpensiveQueries() throws IOException { // Execute with search.allow_expensive_queries = null => default value = true => success BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .get(); + SearchResponse response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -1204,7 +1160,7 @@ ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get() + () -> prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get() ); assertEquals( "[percolate] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", @@ -1214,7 +1170,7 @@ // Set search.allow_expensive_queries setting to "true" ==> success updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); - response = client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); + response = prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).get(); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); @@ -1241,40 +1197,33 @@ public void testWrappedWithConstantScore() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "q", - BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), - XContentType.JSON - ) + SearchResponse response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON ) - .get(); + ).get(); + assertEquals(1, response.getHits().getTotalHits().value); + + response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q", + BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), + XContentType.JSON + ) + ).addSort("_doc", SortOrder.ASC).get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test") - .setQuery( + response = prepareSearch("test").setQuery( + constantScoreQuery( new PercolateQueryBuilder( "q", BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), XContentType.JSON ) ) - .addSort("_doc", SortOrder.ASC) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - - response = client().prepareSearch("test") - .setQuery( - constantScoreQuery( - new PercolateQueryBuilder( - "q", - BytesReference.bytes(jsonBuilder().startObject().field("d", "2020-02-01T15:00:00.000+11:00").endObject()), - XContentType.JSON - ) - ) - ) - .get(); +
).get(); assertEquals(1, response.getHits().getTotalHits().value); } @@ -1310,48 +1259,40 @@ public void testWithWildcardFieldNames() throws Exception { .execute() .actionGet(); - SearchResponse response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "q_simple", - BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), - XContentType.JSON - ) + SearchResponse response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q_simple", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON ) - .get(); + ).get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "q_string", - BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), - XContentType.JSON - ) + response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q_string", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON ) - .get(); + ).get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "q_match", - BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), - XContentType.JSON - ) + response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q_match", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON ) - .get(); + ).get(); assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "q_combo", - BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), - XContentType.JSON - ) + response = prepareSearch("test").setQuery( + new PercolateQueryBuilder( + "q_combo", + BytesReference.bytes(jsonBuilder().startObject().field("text_1", "yada").endObject()), + XContentType.JSON ) - .get(); + ).get(); assertEquals(1, response.getHits().getTotalHits().value); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index eac647cfff634..886a67443e831 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -597,7 +598,7 @@ static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldTy assert valueLength > 0; TransportVersion transportVersion; - if (indexVersion.before(IndexVersion.V_8_8_0)) { + if (indexVersion.before(IndexVersions.V_8_8_0)) { transportVersion = TransportVersion.fromId(indexVersion.id()); } else { transportVersion = TransportVersion.readVersion(input); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 016d9d3f75a21..c00eaa894dd69 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; @@ -457,7 +458,7 @@ static void createQueryBuilderField( ByteArrayOutputStream stream = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(stream) ) { - if (indexVersion.before(IndexVersion.V_8_8_0)) { + if (indexVersion.before(IndexVersions.V_8_8_0)) { // just use the index version directly // there's a direct mapping from IndexVersion to TransportVersion before 8.8.0 out.setTransportVersion(TransportVersion.fromId(indexVersion.id())); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 5a12e0c9f3a37..b47364e3b1a08 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -40,6 +41,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; @@ -53,6 +55,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.TestDocumentParserContext; +import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.BoostingQueryBuilder; import org.elasticsearch.index.query.ConstantScoreQueryBuilder; @@ -67,13 +70,17 @@ import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder; +import org.elasticsearch.index.query.functionscore.ScriptScoreQueryBuilder; import org.elasticsearch.indices.TermsLookup; import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.join.query.HasChildQueryBuilder; import org.elasticsearch.join.query.HasParentQueryBuilder; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import 
org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.DummyQueryParserPlugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.xcontent.XContentBuilder; @@ -92,6 +99,7 @@ import java.util.Collection; import java.util.Collections; import java.util.Comparator; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -130,7 +138,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { @Override protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class, ParentJoinPlugin.class); + return pluginList( + InternalSettingsPlugin.class, + PercolatorPlugin.class, + FoolMeScriptPlugin.class, + ParentJoinPlugin.class, + CustomQueriesPlugin.class + ); } @Override @@ -540,6 +554,38 @@ public void testPercolatorFieldMapper() throws Exception { assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).get(0).stringValue(), equalTo(EXTRACTION_FAILED)); } + public void testParseScriptScoreQueryWithParams() throws Exception { + addQueryFieldMappings(); + ScriptScoreQueryBuilder scriptScoreQueryBuilder = new ScriptScoreQueryBuilder( + new MatchAllQueryBuilder(), + new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "score", Collections.singletonMap("param", "1")) + ); + ParsedDocument doc = mapperService.documentMapper() + .parse( + new SourceToParse( + "1", + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, scriptScoreQueryBuilder).endObject()), + XContentType.JSON + ) + ); + assertNotNull(doc); + } + + public void testParseCustomParserQuery() throws Exception { + addQueryFieldMappings(); + ParsedDocument doc = mapperService.documentMapper() + .parse( + new SourceToParse( + "1", + BytesReference.bytes( + XContentFactory.jsonBuilder().startObject().field(fieldName, new CustomParserQueryBuilder()).endObject() + ), + XContentType.JSON + ) + ); + assertNotNull(doc); + } + public void testStoringQueries() throws Exception { addQueryFieldMappings(); QueryBuilder[] queries = new QueryBuilder[] { @@ -1106,7 +1152,7 @@ public static class FoolMeScriptPlugin extends MockScriptPlugin { @Override protected Map, Object>> pluginScripts() { - return Collections.singletonMap("return true", (vars) -> true); + return Map.of("return true", (vars) -> true, "score", (vars) -> 0f); } @Override @@ -1114,4 +1160,139 @@ public String pluginScriptLang() { return Script.DEFAULT_SCRIPT_LANG; } } + + public static class CustomQueriesPlugin extends Plugin implements SearchPlugin { + @Override + public List> getQueries() { + return Collections.singletonList( + new QuerySpec( + CustomParserQueryBuilder.NAME, + CustomParserQueryBuilder::new, + CustomParserQueryBuilder::fromXContent + ) + ); + } + } + + public static final class CustomParserQueryBuilder extends AbstractQueryBuilder { + private static final String NAME = "CUSTOM"; + + CustomParserQueryBuilder() {} + + CustomParserQueryBuilder(StreamInput in) throws IOException { + super(in); + } + + @Override + protected void doWriteTo(StreamOutput out) { + // only the superclass has state + } + + @Override + protected Query doToQuery(SearchExecutionContext context) { + return new DummyQueryParserPlugin.DummyQuery(); + } + + @Override + protected int doHashCode() { + return 0; + } + + @Override + protected boolean doEquals(CustomParserQueryBuilder other) { + return true; + } + + @Override + 
public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.ZERO; + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.array("list", "value0", "value1", "value2"); + builder.array("listOrdered", "value0", "value1", "value2"); + builder.field("map"); + builder.map(Map.of("key1", "value1", "key2", "value2")); + builder.field("mapOrdered"); + builder.map(Map.of("key3", "value3", "key4", "value4")); + builder.field("mapStrings"); + builder.map(Map.of("key5", "value5", "key6", "value6")); + builder.field("mapSupplier"); + builder.map(Map.of("key7", "value7", "key8", "value8")); + builder.endObject(); + } + + public static CustomParserQueryBuilder fromXContent(XContentParser parser) throws IOException { + { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("list", parser.currentName()); + List list = parser.list(); + assertEquals(3, list.size()); + for (int i = 0; i < 3; i++) { + assertEquals("value" + i, list.get(i).toString()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + } + { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("listOrdered", parser.currentName()); + List listOrdered = parser.listOrderedMap(); + assertEquals(3, listOrdered.size()); + for (int i = 0; i < 3; i++) { + assertEquals("value" + i, listOrdered.get(i).toString()); + } + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + } + { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("map", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + Map map = parser.map(); + assertEquals(2, map.size()); + assertEquals("value1", map.get("key1").toString()); + assertEquals("value2", map.get("key2").toString()); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + } + { + assertEquals("mapOrdered", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + Map mapOrdered = parser.mapOrdered(); + assertEquals(2, mapOrdered.size()); + assertEquals("value3", mapOrdered.get("key3").toString()); + assertEquals("value4", mapOrdered.get("key4").toString()); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + } + { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("mapStrings", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + Map mapStrings = parser.map(); + assertEquals(2, mapStrings.size()); + assertEquals("value5", mapStrings.get("key5").toString()); + assertEquals("value6", mapStrings.get("key6").toString()); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + } + { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals("mapSupplier", parser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + Map mapSupplier = parser.map(HashMap::new, XContentParser::text); + assertEquals(2, mapSupplier.size()); + assertEquals("value7", mapSupplier.get("key7").toString()); + assertEquals("value8", mapSupplier.get("key8").toString()); + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + } + + assertEquals(XContentParser.Token.END_OBJECT, 
parser.nextToken()); + return new CustomParserQueryBuilder(); + } + } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java index bce98ec90d527..05c2c27de40fc 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchTests.java @@ -9,7 +9,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -47,7 +46,7 @@ import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -87,17 +86,17 @@ public void testPercolateScriptQuery() throws IOException { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .execute() .actionGet(); - SearchResponse response = client().prepareSearch("index") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), - XContentType.JSON - ) - ) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertSearchHitsWithoutFailures( + client().prepareSearch("index") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), + XContentType.JSON + ) + ), + "1" + ); } public void testPercolateQueryWithNestedDocuments_doNotLeakBitsetCacheEntries() throws Exception { @@ -265,17 +264,17 @@ public void testMapUnmappedFieldAsText() throws IOException { .get(); indicesAdmin().prepareRefresh().get(); - SearchResponse response = client().prepareSearch("test") - .setQuery( - new PercolateQueryBuilder( - "query", - BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), - XContentType.JSON - ) - ) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertSearchHitsWithoutFailures( + client().prepareSearch("test") + .setQuery( + new PercolateQueryBuilder( + "query", + BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()), + XContentType.JSON + ) + ), + "1" + ); } public void testRangeQueriesWithNow() throws Exception { diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index dd9bde8035ef4..9947d8a727d28 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -282,7 +282,7 @@ private ReindexRequestBuilder reindexAndPartiallyBlock() throws 
Exception { ); // Checks that the all documents have been indexed and correctly counted - assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numDocs); + assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numDocs); assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); ReindexRequestBuilder builder = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source(INDEX_NAME) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java index 148189152b30a..d7f71fcc510ab 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/BulkByScrollUsesAllScrollDocumentsAfterConflictsIntegTests.java @@ -201,8 +201,7 @@ public void testDeleteByQuery() throws Exception { // Ensure that the write thread blocking task is currently executing barrier.await(); - final SearchResponse searchResponse = client().prepareSearch(sourceIndex) - .setSize(numDocs) // Get all indexed docs + final SearchResponse searchResponse = prepareSearch(sourceIndex).setSize(numDocs) // Get all indexed docs .addSort(SORTING_FIELD, SortOrder.DESC) .execute() .actionGet(); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index feeb989959626..8f9c2b7f34105 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -23,7 +23,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; @@ -264,7 +264,7 @@ public void testMultipleFeatureMigration() throws Exception { .setAliasName(".second-internal-managed-alias") .setPrimaryIndex(".second-int-man-old") .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) - .setSettings(createSettings(IndexVersion.V_7_0_0, 0)) + .setSettings(createSettings(IndexVersions.V_7_0_0, 0)) .setMappings(createMapping(true, true)) .setOrigin(ORIGIN) .setVersionMetaKey(VERSION_META_KEY) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java index f8f87011405fc..b07eb1b158087 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexPlugin.java @@ -12,35 +12,23 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import 
org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.reindex.BulkByScrollTask; import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.UpdateByQueryAction; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.ArrayList; import java.util.Arrays; @@ -95,23 +83,10 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - return Collections.singletonList(new ReindexSslConfig(environment.settings(), environment, resourceWatcherService)); + public Collection createComponents(PluginServices services) { + return Collections.singletonList( + new ReindexSslConfig(services.environment().settings(), services.environment(), services.resourceWatcherService()) + ); } @Override diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java index 803264f6b73be..35ad5fe9532cd 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/CancelTests.java @@ -102,7 +102,7 @@ private void testCancel( ); // Checks that the all documents have been indexed and correctly counted - assertHitCount(client().prepareSearch(INDEX).setSize(0), numDocs); + assertHitCount(prepareSearch(INDEX).setSize(0), numDocs); assertThat(ALLOWED_OPERATIONS.drainPermits(), equalTo(0)); // Scroll by 1 so that cancellation is easier to control @@ -225,7 +225,7 @@ public void testReindexCancel() throws Exception { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request"))); refresh("dest"); - assertHitCount(client().prepareSearch("dest").setSize(0), modified); + assertHitCount(prepareSearch("dest").setSize(0), modified); }, equalTo("reindex from [" + INDEX + "] to [dest]")); } @@ -241,7 +241,7 @@ public void 
testUpdateByQueryCancel() throws Exception { testCancel(UpdateByQueryAction.NAME, updateByQuery().setPipeline("set-processed").source(INDEX), (response, total, modified) -> { assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request"))); - assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); + assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); }, equalTo("update-by-query [" + INDEX + "]")); assertAcked(clusterAdmin().deletePipeline(new DeletePipelineRequest("set-processed")).get()); @@ -253,7 +253,7 @@ public void testDeleteByQueryCancel() throws Exception { deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request"))); - assertHitCount(client().prepareSearch(INDEX).setSize(0), total - modified); + assertHitCount(prepareSearch(INDEX).setSize(0), total - modified); }, equalTo("delete-by-query [" + INDEX + "]") ); @@ -266,7 +266,7 @@ public void testReindexCancelWithWorkers() throws Exception { (response, total, modified) -> { assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); refresh("dest"); - assertHitCount(client().prepareSearch("dest").setSize(0), modified); + assertHitCount(prepareSearch("dest").setSize(0), modified); }, equalTo("reindex from [" + INDEX + "] to [dest]") ); @@ -287,7 +287,7 @@ public void testUpdateByQueryCancelWithWorkers() throws Exception { updateByQuery().setPipeline("set-processed").source(INDEX).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().updated(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); - assertHitCount(client().prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); + assertHitCount(prepareSearch(INDEX).setSize(0).setQuery(termQuery("processed", true)), modified); }, equalTo("update-by-query [" + INDEX + "]") ); @@ -301,7 +301,7 @@ public void testDeleteByQueryCancelWithWorkers() throws Exception { deleteByQuery().source(INDEX).filter(QueryBuilders.matchAllQuery()).setSlices(5), (response, total, modified) -> { assertThat(response, matcher().deleted(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5))); - assertHitCount(client().prepareSearch(INDEX).setSize(0), total - modified); + assertHitCount(prepareSearch(INDEX).setSize(0), total - modified); }, equalTo("delete-by-query [" + INDEX + "]") ); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java index cfcd78374bca0..2f2248e304989 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryBasicTests.java @@ -61,25 +61,25 @@ public void testBasics() throws Exception { client().prepareIndex("test").setId("7").setSource("foo", "f") ); - assertHitCount(client().prepareSearch("test").setSize(0), 7); + assertHitCount(prepareSearch("test").setSize(0), 7); // Deletes two docs that matches "foo:a" assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2)); - assertHitCount(client().prepareSearch("test").setSize(0), 5); + assertHitCount(prepareSearch("test").setSize(0), 5); // Deletes the 
two first docs with limit by size DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true); request.source().addSort("foo.keyword", SortOrder.ASC); assertThat(request.get(), matcher().deleted(2)); - assertHitCount(client().prepareSearch("test").setSize(0), 3); + assertHitCount(prepareSearch("test").setSize(0), 3); // Deletes but match no docs assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0)); - assertHitCount(client().prepareSearch("test").setSize(0), 3); + assertHitCount(prepareSearch("test").setSize(0), 3); // Deletes all remaining docs assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3)); - assertHitCount(client().prepareSearch("test").setSize(0), 0); + assertHitCount(prepareSearch("test").setSize(0), 0); } public void testDeleteByQueryWithOneIndex() throws Exception { @@ -92,7 +92,7 @@ public void testDeleteByQueryWithOneIndex() throws Exception { indexRandom(true, true, true, builders); assertThat(deleteByQuery().source("t*").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(docs)); - assertHitCount(client().prepareSearch("test").setSize(0), 0); + assertHitCount(prepareSearch("test").setSize(0), 0); } public void testDeleteByQueryWithMultipleIndices() throws Exception { @@ -122,15 +122,15 @@ public void testDeleteByQueryWithMultipleIndices() throws Exception { for (int i = 0; i < indices; i++) { long remaining = docs - candidates[i]; - assertHitCount(client().prepareSearch("test-" + i).setSize(0), remaining); + assertHitCount(prepareSearch("test-" + i).setSize(0), remaining); } - assertHitCount(client().prepareSearch().setSize(0), (indices * docs) - deletions); + assertHitCount(prepareSearch().setSize(0), (indices * docs) - deletions); } public void testDeleteByQueryWithMissingIndex() throws Exception { indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "a")); - assertHitCount(client().prepareSearch().setSize(0), 1); + assertHitCount(prepareSearch().setSize(0), 1); try { deleteByQuery().source("missing").filter(QueryBuilders.matchAllQuery()).get(); @@ -154,19 +154,19 @@ public void testDeleteByQueryWithRouting() throws Exception { indexRandom(true, true, true, builders); logger.info("--> counting documents with no routing, should be equal to [{}]", docs); - assertHitCount(client().prepareSearch().setSize(0), docs); + assertHitCount(prepareSearch().setSize(0), docs); String routing = String.valueOf(randomIntBetween(2, docs)); logger.info("--> counting documents with routing [{}]", routing); - long expected = client().prepareSearch().setSize(0).setRouting(routing).get().getHits().getTotalHits().value; + long expected = prepareSearch().setSize(0).setRouting(routing).get().getHits().getTotalHits().value; logger.info("--> delete all documents with routing [{}] with a delete-by-query", routing); DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()); delete.source().setRouting(routing); assertThat(delete.refresh(true).get(), matcher().deleted(expected)); - assertHitCount(client().prepareSearch().setSize(0), docs - expected); + assertHitCount(prepareSearch().setSize(0), docs - expected); } public void testDeleteByMatchQuery() throws Exception { @@ -186,13 +186,13 @@ public void testDeleteByMatchQuery() throws Exception { indexRandom(true, true, true, builders); int n = between(0, 
docs - 1); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))), 1); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs); + assertHitCount(prepareSearch("test").setSize(0).setQuery(matchQuery("_id", Integer.toString(n))), 1); + assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs); DeleteByQueryRequestBuilder delete = deleteByQuery().source("alias").filter(matchQuery("_id", Integer.toString(n))); assertThat(delete.refresh(true).get(), matcher().deleted(1L)); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs - 1); + assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.matchAllQuery()), docs - 1); } public void testDeleteByQueryWithDateMath() throws Exception { @@ -201,7 +201,7 @@ public void testDeleteByQueryWithDateMath() throws Exception { DeleteByQueryRequestBuilder delete = deleteByQuery().source("test").filter(rangeQuery("d").to("now-1h")); assertThat(delete.refresh(true).get(), matcher().deleted(1L)); - assertHitCount(client().prepareSearch("test").setSize(0), 0); + assertHitCount(prepareSearch("test").setSize(0), 0); } public void testDeleteByQueryOnReadOnlyIndex() throws Exception { @@ -224,7 +224,7 @@ public void testDeleteByQueryOnReadOnlyIndex() throws Exception { disableIndexBlock("test", SETTING_READ_ONLY); } - assertHitCount(client().prepareSearch("test").setSize(0), docs); + assertHitCount(prepareSearch("test").setSize(0), docs); } public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { @@ -280,9 +280,9 @@ public void testDeleteByQueryOnReadOnlyAllowDeleteIndex() throws Exception { } } if (diskAllocationDeciderEnabled) { - assertHitCount(client().prepareSearch("test").setSize(0), 0); + assertHitCount(prepareSearch("test").setSize(0), 0); } else { - assertHitCount(client().prepareSearch("test").setSize(0), docs); + assertHitCount(prepareSearch("test").setSize(0), docs); } } @@ -297,7 +297,7 @@ public void testSlices() throws Exception { client().prepareIndex("test").setId("6").setSource("foo", "e"), client().prepareIndex("test").setId("7").setSource("foo", "f") ); - assertHitCount(client().prepareSearch("test").setSize(0), 7); + assertHitCount(prepareSearch("test").setSize(0), 7); int slices = randomSlices(); int expectedSlices = expectedSliceStatuses(slices, "test"); @@ -307,14 +307,14 @@ public void testSlices() throws Exception { deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(), matcher().deleted(2).slices(hasSize(expectedSlices)) ); - assertHitCount(client().prepareSearch("test").setSize(0), 5); + assertHitCount(prepareSearch("test").setSize(0), 5); // Delete remaining docs assertThat( deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(), matcher().deleted(5).slices(hasSize(expectedSlices)) ); - assertHitCount(client().prepareSearch("test").setSize(0), 0); + assertHitCount(prepareSearch("test").setSize(0), 0); } public void testMultipleSources() throws Exception { @@ -333,7 +333,7 @@ public void testMultipleSources() throws Exception { List allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList()); indexRandom(true, allDocs); for (Map.Entry> entry : docs.entrySet()) { - assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); + 
assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); } int slices = randomSlices(1, 10); @@ -347,7 +347,7 @@ public void testMultipleSources() throws Exception { ); for (String index : docs.keySet()) { - assertHitCount(client().prepareSearch(index).setSize(0), 0); + assertHitCount(prepareSearch(index).setSize(0), 0); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryConcurrentTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryConcurrentTests.java index 2e5fd4803fbe4..81d00d98b1fec 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryConcurrentTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/DeleteByQueryConcurrentTests.java @@ -40,7 +40,7 @@ public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { final CountDownLatch start = new CountDownLatch(1); for (int t = 0; t < threads.length; t++) { final int threadNum = t; - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)), docs); + assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", threadNum)), docs); Runnable r = () -> { try { @@ -64,7 +64,7 @@ public void testConcurrentDeleteByQueriesOnDifferentDocs() throws Throwable { } for (int t = 0; t < threads.length; t++) { - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)), 0); + assertHitCount(prepareSearch("test").setSize(0).setQuery(QueryBuilders.termQuery("field", t)), 0); } } @@ -105,7 +105,7 @@ public void testConcurrentDeleteByQueriesOnSameDocs() throws Throwable { thread.join(); } - assertHitCount(client().prepareSearch("test").setSize(0), 0L); + assertHitCount(prepareSearch("test").setSize(0), 0L); assertThat(deleted.get(), equalTo(docs)); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexBasicTests.java index 3bc029058e705..45ca5a536f34f 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexBasicTests.java @@ -35,28 +35,28 @@ public void testFiltering() throws Exception { client().prepareIndex("source").setId("3").setSource("foo", "b"), client().prepareIndex("source").setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch("source").setSize(0), 4); + assertHitCount(prepareSearch("source").setSize(0), 4); // Copy all the docs ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true); assertThat(copy.get(), matcher().created(4)); - assertHitCount(client().prepareSearch("dest").setSize(0), 4); + assertHitCount(prepareSearch("dest").setSize(0), 4); // Now none of them createIndex("none"); copy = reindex().source("source").destination("none").filter(termQuery("foo", "no_match")).refresh(true); assertThat(copy.get(), matcher().created(0)); - assertHitCount(client().prepareSearch("none").setSize(0), 0); + assertHitCount(prepareSearch("none").setSize(0), 0); // Now half of them copy = reindex().source("source").destination("dest_half").filter(termQuery("foo", "a")).refresh(true); assertThat(copy.get(), matcher().created(2)); - assertHitCount(client().prepareSearch("dest_half").setSize(0), 2); + assertHitCount(prepareSearch("dest_half").setSize(0), 2); // Limit with maxDocs copy = 
reindex().source("source").destination("dest_size_one").maxDocs(1).refresh(true); assertThat(copy.get(), matcher().created(1)); - assertHitCount(client().prepareSearch("dest_size_one").setSize(0), 1); + assertHitCount(prepareSearch("dest_size_one").setSize(0), 1); } public void testCopyMany() throws Exception { @@ -67,14 +67,14 @@ public void testCopyMany() throws Exception { } indexRandom(true, docs); - assertHitCount(client().prepareSearch("source").setSize(0), max); + assertHitCount(prepareSearch("source").setSize(0), max); // Copy all the docs ReindexRequestBuilder copy = reindex().source("source").destination("dest").refresh(true); // Use a small batch size so we have to use more than one batch copy.source().setSize(5); assertThat(copy.get(), matcher().created(max).batches(max, 5)); - assertHitCount(client().prepareSearch("dest").setSize(0), max); + assertHitCount(prepareSearch("dest").setSize(0), max); // Copy some of the docs int half = max / 2; @@ -83,7 +83,7 @@ public void testCopyMany() throws Exception { copy.source().setSize(5); copy.maxDocs(half); assertThat(copy.get(), matcher().created(half).batches(half, 5)); - assertHitCount(client().prepareSearch("dest_half").setSize(0), half); + assertHitCount(prepareSearch("dest_half").setSize(0), half); } public void testCopyManyWithSlices() throws Exception { @@ -94,7 +94,7 @@ public void testCopyManyWithSlices() throws Exception { } indexRandom(true, docs); - assertHitCount(client().prepareSearch("source").setSize(0), max); + assertHitCount(prepareSearch("source").setSize(0), max); int slices = randomSlices(); int expectedSlices = expectedSliceStatuses(slices, "source"); @@ -104,7 +104,7 @@ public void testCopyManyWithSlices() throws Exception { // Use a small batch size so we have to use more than one batch copy.source().setSize(5); assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices))); - assertHitCount(client().prepareSearch("dest").setSize(0), max); + assertHitCount(prepareSearch("dest").setSize(0), max); // Copy some of the docs int half = max / 2; @@ -114,7 +114,7 @@ public void testCopyManyWithSlices() throws Exception { copy.maxDocs(half); BulkByScrollResponse response = copy.get(); assertThat(response, matcher().created(lessThanOrEqualTo((long) half)).slices(hasSize(expectedSlices))); - assertHitCount(client().prepareSearch("dest_half").setSize(0), response.getCreated()); + assertHitCount(prepareSearch("dest_half").setSize(0), response.getCreated()); } public void testMultipleSources() throws Exception { @@ -134,7 +134,7 @@ public void testMultipleSources() throws Exception { List allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList()); indexRandom(true, allDocs); for (Map.Entry> entry : docs.entrySet()) { - assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); + assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); } int slices = randomSlices(1, 10); @@ -145,7 +145,7 @@ public void testMultipleSources() throws Exception { BulkByScrollResponse response = request.get(); assertThat(response, matcher().created(allDocs.size()).slices(hasSize(expectedSlices))); - assertHitCount(client().prepareSearch("dest").setSize(0), allDocs.size()); + assertHitCount(prepareSearch("dest").setSize(0), allDocs.size()); } public void testMissingSources() { @@ -166,12 +166,12 @@ public void testReindexFromComplexDateMathIndexName() throws Exception { 
client().prepareIndex(sourceIndexName).setId("3").setSource("foo", "b"), client().prepareIndex(sourceIndexName).setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch(sourceIndexName).setSize(0), 4); + assertHitCount(prepareSearch(sourceIndexName).setSize(0), 4); // Copy all the docs ReindexRequestBuilder copy = reindex().source(sourceIndexName).destination(destIndexName).refresh(true); assertThat(copy.get(), matcher().created(4)); - assertHitCount(client().prepareSearch(destIndexName).setSize(0), 4); + assertHitCount(prepareSearch(destIndexName).setSize(0), 4); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFailureTests.java index 8a3066ebac83d..5e868598d165e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFailureTests.java @@ -128,7 +128,7 @@ public void testDateMathResolvesSameIndexName() throws Exception { client().prepareIndex(sourceIndexName).setId("3").setSource("foo", "b"), client().prepareIndex(sourceIndexName).setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch(sourceIndexName).setSize(0), 4); + assertHitCount(prepareSearch(sourceIndexName).setSize(0), 4); ActionRequestValidationException e = expectThrows( ActionRequestValidationException.class, diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java index 55b7b2cc902cc..5509e44b52a3e 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ReindexFromRemoteWithAuthTests.java @@ -19,38 +19,25 @@ import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilterChain; import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpInfo; import org.elasticsearch.index.reindex.ReindexAction; import org.elasticsearch.index.reindex.ReindexRequestBuilder; import org.elasticsearch.index.reindex.RemoteInfo; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestHeaderDefinition; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.root.MainRestPlugin; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.TelemetryProvider; import 
org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.netty4.Netty4Plugin; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.junit.Before; import java.util.Arrays; @@ -58,7 +45,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Supplier; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; @@ -164,23 +150,8 @@ public static class TestPlugin extends Plugin implements ActionPlugin { private final SetOnce testFilter = new SetOnce<>(); @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - testFilter.set(new ReindexFromRemoteWithAuthTests.TestFilter(threadPool)); + public Collection createComponents(PluginServices services) { + testFilter.set(new ReindexFromRemoteWithAuthTests.TestFilter(services.threadPool())); return Collections.emptyList(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryBasicTests.java index fb02abdab0f6a..f37c9b5891416 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/UpdateByQueryBasicTests.java @@ -34,7 +34,7 @@ public void testBasics() throws Exception { client().prepareIndex("test").setId("3").setSource("foo", "b"), client().prepareIndex("test").setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch("test").setSize(0), 4); + assertHitCount(prepareSearch("test").setSize(0), 4); assertEquals(1, client().prepareGet("test", "1").get().getVersion()); assertEquals(1, client().prepareGet("test", "4").get().getVersion()); @@ -74,7 +74,7 @@ public void testSlices() throws Exception { client().prepareIndex("test").setId("3").setSource("foo", "b"), client().prepareIndex("test").setId("4").setSource("foo", "c") ); - assertHitCount(client().prepareSearch("test").setSize(0), 4); + assertHitCount(prepareSearch("test").setSize(0), 4); assertEquals(1, client().prepareGet("test", "1").get().getVersion()); assertEquals(1, client().prepareGet("test", "4").get().getVersion()); @@ -124,7 +124,7 @@ public void testMultipleSources() throws Exception { List allDocs = docs.values().stream().flatMap(Collection::stream).collect(Collectors.toList()); indexRandom(true, allDocs); for (Map.Entry> entry : docs.entrySet()) { - assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); + assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().size()); } int slices = randomSlices(1, 10); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java index 3d656bfa175d5..4b76045c5dc63 100644 --- 
a/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/remote/RemoteScrollableHitSourceTests.java @@ -615,8 +615,7 @@ private class TestRemoteScrollableHitSource extends RemoteScrollableHitSource { } private RejectAwareActionListener wrapAsListener(Consumer consumer) { - Consumer throwing = e -> { throw new AssertionError(e); }; - return RejectAwareActionListener.wrap(consumer::accept, throwing, throwing); + return RejectAwareActionListener.wrap(consumer::accept, ESTestCase::fail, ESTestCase::fail); } @SuppressWarnings("unchecked") diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 36742765edad5..c71bbf02782ca 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Locale; @@ -109,7 +109,7 @@ public AzureRepository( recoverySettings, buildBasePath(metadata), buildLocation(metadata), - Meter.NOOP + MeterRegistry.NOOP ); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.storageService = storageService; diff --git a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index f88a96e765827..6ff9a40940e8c 100644 --- a/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ b/modules/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -11,30 +11,19 @@ import com.azure.core.util.serializer.JacksonAdapter; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; -import 
org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Arrays; @@ -42,7 +31,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Supplier; /** * A plugin to add a repository type that writes to and from the Azure cloud storage service. @@ -84,23 +72,8 @@ public Map getRepositories( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - AzureClientProvider azureClientProvider = AzureClientProvider.create(threadPool, settings); + public Collection createComponents(PluginServices services) { + AzureClientProvider azureClientProvider = AzureClientProvider.create(services.threadPool(), settings); azureStoreService.set(createAzureStorageService(settings, azureClientProvider)); return List.of(azureClientProvider); } diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java index a7a10a17668bc..6f43f253db4c8 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -38,6 +38,8 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class AzureStorageServiceTests extends ESTestCase { private ThreadPool threadPool; @@ -73,7 +75,9 @@ public void testReadSecuredSettings() { private AzureRepositoryPlugin pluginWithSettingsValidation(Settings settings) { final AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings); new SettingsModule(settings, plugin.getSettings(), Collections.emptyList()); - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.threadPool()).thenReturn(threadPool); + plugin.createComponents(services); return plugin; } diff --git a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index a42839c6d0174..21dd7529afaca 100644 --- 
a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -21,7 +21,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Map; @@ -78,7 +78,7 @@ class GoogleCloudStorageRepository extends MeteredBlobStoreRepository { recoverySettings, buildBasePath(metadata), buildLocation(metadata), - Meter.NOOP + MeterRegistry.NOOP ); this.storageService = storageService; diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index 5790fdb785bb6..b320364877e98 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -31,14 +31,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.plugins.TelemetryPlugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; @@ -51,11 +49,12 @@ import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.snapshots.mockstore.BlobStoreWrapper; -import org.elasticsearch.telemetry.DelegatingMeter; -import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.Measurement; +import org.elasticsearch.telemetry.RecordingInstruments; +import org.elasticsearch.telemetry.RecordingMeterRegistry; +import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.Meter; -import org.elasticsearch.telemetry.tracing.Tracer; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -75,7 +74,6 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; import java.util.stream.Collectors; import java.util.stream.StreamSupport; @@ -141,7 +139,7 @@ protected Settings repositorySettings(String repoName) { @Override protected Collection> nodePlugins() { - return List.of(TestS3RepositoryPlugin.class, TestTelemetryPlugin.class); + return List.of(TestS3RepositoryPlugin.class, TestS3BlobTelemetryPlugin.class); } @Override 
@@ -198,7 +196,7 @@ public void testAbortRequestStats() throws Exception { flushAndRefresh(index); ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); // Intentionally fail snapshot to trigger abortMultipartUpload requests shouldFailCompleteMultipartUploadRequest.set(true); @@ -240,14 +238,14 @@ public void testMetrics() throws Exception { flushAndRefresh(index); ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); final String snapshot = "snapshot"; assertSuccessfulSnapshot(clusterAdmin().prepareCreateSnapshot(repository, snapshot).setWaitForCompletion(true).setIndices(index)); assertAcked(client().admin().indices().prepareDelete(index)); assertSuccessfulRestore(clusterAdmin().prepareRestoreSnapshot(repository, snapshot).setWaitForCompletion(true)); ensureGreen(index); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); assertAcked(clusterAdmin().prepareDeleteSnapshot(repository, snapshot).get()); final Map aggregatedMetrics = new HashMap<>(); @@ -267,21 +265,25 @@ public void testMetrics() throws Exception { final Map statsCollectors = s3BlobStore .getStatsCollectors().collectors; - final var plugins = internalCluster().getInstance(PluginsService.class, nodeName).filterPlugins(TestTelemetryPlugin.class); + final var plugins = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestS3BlobTelemetryPlugin.class); assertThat(plugins, hasSize(1)); - final Map, AtomicLong> metrics = plugins.get(0).metrics; + final List metrics = Measurement.combine(plugins.get(0).getLongCounterMeasurement(METRIC_REQUESTS_COUNT)); - assertThat(statsCollectors.size(), equalTo(metrics.size())); - metrics.forEach((attributes, counter) -> { - final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) attributes.get("operation")); + assertThat( + statsCollectors.size(), + equalTo(metrics.stream().map(m -> m.attributes().get("operation")).collect(Collectors.toSet()).size()) + ); + metrics.forEach(metric -> { + final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation")); final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey( operation, - OperationPurpose.parse((String) attributes.get("purpose")) + OperationPurpose.parse((String) metric.attributes().get("purpose")) ); assertThat(statsCollectors, hasKey(statsKey)); - assertThat(counter.get(), equalTo(statsCollectors.get(statsKey).counter.sum())); + assertThat(metric.getLong(), equalTo(statsCollectors.get(statsKey).counter.sum())); - aggregatedMetrics.compute(operation.getKey(), (k, v) -> v == null ? counter.get() : v + counter.get()); + aggregatedMetrics.compute(operation.getKey(), (k, v) -> v == null ? 
metric.getLong() : v + metric.getLong()); }); } @@ -433,7 +435,7 @@ protected S3Repository createRepository( BigArrays bigArrays, RecoverySettings recoverySettings ) { - return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, getMeter()) { + return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, getMeterRegistry()) { @Override public BlobStore blobStore() { @@ -554,63 +556,45 @@ private boolean isMultiPartUpload(String request) { } } - public static class TestTelemetryPlugin extends Plugin implements TelemetryPlugin { - - private final Map, AtomicLong> metrics = ConcurrentCollections.newConcurrentMap(); - - private final LongCounter longCounter = new LongCounter() { - @Override - public void increment() { - throw new UnsupportedOperationException(); - } + public static class TestS3BlobTelemetryPlugin extends TestTelemetryPlugin { + protected final MeterRegistry meter = new RecordingMeterRegistry() { + private final LongCounter longCounter = new RecordingInstruments.RecordingLongCounter(METRIC_REQUESTS_COUNT, recorder) { + @Override + public void increment() { + throw new UnsupportedOperationException(); + } - @Override - public void incrementBy(long inc) { - throw new UnsupportedOperationException(); - } + @Override + public void incrementBy(long inc) { + throw new UnsupportedOperationException(); + } - @Override - public void incrementBy(long inc, Map attributes) { - assertThat( - attributes, - allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) - ); - metrics.computeIfAbsent(attributes, k -> new AtomicLong()).addAndGet(inc); - } + @Override + public void incrementBy(long inc, Map attributes) { + assertThat( + attributes, + allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); + super.incrementBy(inc, attributes); + } + }; @Override - public String getName() { - return METRIC_REQUESTS_COUNT; + protected LongCounter buildLongCounter(String name, String description, String unit) { + return longCounter; } - }; - private final Meter meter = new DelegatingMeter(Meter.NOOP) { @Override public LongCounter registerLongCounter(String name, String description, String unit) { assertThat(name, equalTo(METRIC_REQUESTS_COUNT)); - return longCounter; + return super.registerLongCounter(name, description, unit); } @Override public LongCounter getLongCounter(String name) { assertThat(name, equalTo(METRIC_REQUESTS_COUNT)); - return longCounter; + return super.getLongCounter(name); } }; - - @Override - public TelemetryProvider getTelemetryProvider(Settings settings) { - return new TelemetryProvider() { - @Override - public Tracer getTracer() { - return Tracer.NOOP; - } - - @Override - public Meter getMeter() { - return meter; - } - }; - } } } diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java index 0e2e38a5af224..8c6ea9f6edf3b 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3RepositoryThirdPartyTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.plugins.PluginsService; import 
org.elasticsearch.repositories.AbstractThirdPartyRepositoryTestCase; import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -67,6 +67,26 @@ protected SecureSettings credentials() { return secureSettings; } + @Override + protected Settings nodeSettings() { + final var settings = Settings.builder().put(super.nodeSettings()); + if (randomBoolean()) { + final var defaultMillis = S3Service.REPOSITORY_S3_CAS_TTL_SETTING.get(Settings.EMPTY).millis(); + settings.put( + S3Service.REPOSITORY_S3_CAS_TTL_SETTING.getKey(), + TimeValue.timeValueMillis(randomLongBetween(defaultMillis, defaultMillis * 2)) + ); + } + if (randomBoolean()) { + final var defaultMillis = S3Service.REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(Settings.EMPTY).millis(); + settings.put( + S3Service.REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.getKey(), + TimeValue.timeValueMillis(randomLongBetween(defaultMillis, defaultMillis * 2)) + ); + } + return settings.build(); + } + @Override protected void createRepository(String repoName) { Settings.Builder settings = Settings.builder() @@ -113,7 +133,7 @@ public long absoluteTimeInMillis() { ClusterServiceUtils.createClusterService(threadpool), BigArrays.NON_RECYCLING_INSTANCE, new RecoverySettings(node().settings(), node().injector().getInstance(ClusterService.class).getClusterSettings()), - Meter.NOOP + MeterRegistry.NOOP ) ) { repository.start(); diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 04bdf7b637e27..c0b64c5c672f6 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -32,6 +32,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; @@ -65,7 +66,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.stream.Collectors; @@ -580,49 +580,70 @@ private class CompareAndExchangeOperation { this.threadPool = threadPool; } - private List listMultipartUploads() { - final var listRequest = new ListMultipartUploadsRequest(bucket); - listRequest.setPrefix(blobKey); - listRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); - try { - return SocketAccess.doPrivileged(() -> client.listMultipartUploads(listRequest)).getMultipartUploads(); - } catch (AmazonS3Exception e) { - if (e.getStatusCode() == 404) { - return List.of(); - } - throw e; + void run(BytesReference expected, BytesReference updated, ActionListener listener) throws Exception { + BlobContainerUtils.ensureValidRegisterContent(updated); + + if (hasPreexistingUploads()) { + // This is a small optimization to improve the liveness properties of 
this algorithm. + // + // We can safely proceed even if there are other uploads in progress, but that would add to the potential for collisions and + // delays. Thus in this case we prefer to avoid disturbing the ongoing attempts and just fail up front. + listener.onResponse(OptionalBytesReference.MISSING); + return; } - } - private int getUploadIndex(String targetUploadId, List<MultipartUpload> multipartUploads) { - var uploadIndex = 0; - var found = false; - for (MultipartUpload multipartUpload : multipartUploads) { - final var observedUploadId = multipartUpload.getUploadId(); - if (observedUploadId.equals(targetUploadId)) { - final var currentTimeMillis = blobStore.getThreadPool().absoluteTimeInMillis(); - final var ageMillis = currentTimeMillis - multipartUpload.getInitiated().toInstant().toEpochMilli(); - final var expectedAgeRangeMillis = blobStore.getCompareAndExchangeTimeToLive().millis(); - if (ageMillis < -expectedAgeRangeMillis || ageMillis > expectedAgeRangeMillis) { - logger.warn( - """ - compare-and-exchange of blob [{}:{}] was initiated at [{}={}] \ - which deviates from local node epoch time [{}] by more than the warn threshold of [{}ms]""", - bucket, - blobKey, - multipartUpload.getInitiated(), - multipartUpload.getInitiated().toInstant().toEpochMilli(), - currentTimeMillis, - expectedAgeRangeMillis - ); - } - found = true; - } else if (observedUploadId.compareTo(targetUploadId) < 0) { - uploadIndex += 1; - } + // Step 1: Start our upload and upload the new contents as its unique part. + + final var uploadId = initiateMultipartUpload(); + final var partETag = uploadPart(updated, uploadId); + + // Step 2: List all uploads that are racing to complete, and compute our position in the list. This definitely includes all the + // uploads that started before us and are still in-progress, and may include some later-started in-progress ones too. + + final var currentUploads = listMultipartUploads(); + final var uploadIndex = getUploadIndex(uploadId, currentUploads); + + if (uploadIndex < 0) { + // already aborted by someone else + listener.onResponse(OptionalBytesReference.MISSING); + return; } - return found ? uploadIndex : -1; + SubscribableListener + + // Step 3: Ensure all other uploads in currentUploads are complete (either successfully, aborted by us or by another upload) + + .newForked(l -> ensureOtherUploadsComplete(uploadId, uploadIndex, currentUploads, l)) + + // Step 4: Read the current register value. + + .andThen((l, ignored) -> getRegister(purpose, rawKey, l)) + + // Step 5: Perform the compare-and-swap by completing our upload iff the witnessed value matches the expected value. + + .andThen((l, currentValue) -> ActionListener.completeWith(l, () -> { + if (currentValue.isPresent() && currentValue.bytesReference().equals(expected)) { + completeMultipartUpload(uploadId, partETag); + } else { + // Best-effort attempt to clean up after ourselves. + safeAbortMultipartUpload(uploadId); + } + return currentValue; + })) + + // Step 6: Complete the listener. + + .addListener(listener.delegateResponse((l, e) -> { + // Best-effort attempt to clean up after ourselves. + safeAbortMultipartUpload(uploadId); + l.onFailure(e); + })); + + // No compare-and-exchange operations that started before ours can write to the register (in its step 5) after we have read the + // current value of the register (in our step 4) because we have ensured all earlier operations have completed (in our step 3).
+ // Conversely, if some other compare-and-exchange operation started after us then it will not read the register (in its step 4) + // until it has ensured we will not do a future write to the register (in our step 5) by cancelling all the racing uploads that + // it observed (in its step 3). Thus steps 4 and 5 can only complete successfully with no intervening writes to the register. } /** @@ -656,24 +677,27 @@ private boolean hasPreexistingUploads() { return false; } - void run(BytesReference expected, BytesReference updated, ActionListener listener) throws Exception { - BlobContainerUtils.ensureValidRegisterContent(updated); - - if (hasPreexistingUploads()) { - - // This is a small optimization to improve the liveness properties of this algorithm. - // - // We can safely proceed even if there are other uploads in progress, but that would add to the potential for collisions and - // delays. Thus in this case we prefer avoid disturbing the ongoing attempts and just fail up front. - - listener.onResponse(OptionalBytesReference.MISSING); - return; + private List listMultipartUploads() { + final var listRequest = new ListMultipartUploadsRequest(bucket); + listRequest.setPrefix(blobKey); + listRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.LIST_OBJECTS, purpose)); + try { + return SocketAccess.doPrivileged(() -> client.listMultipartUploads(listRequest)).getMultipartUploads(); + } catch (AmazonS3Exception e) { + if (e.getStatusCode() == 404) { + return List.of(); + } + throw e; } + } + private String initiateMultipartUpload() { final var initiateRequest = new InitiateMultipartUploadRequest(bucket, blobKey); initiateRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); - final var uploadId = SocketAccess.doPrivileged(() -> client.initiateMultipartUpload(initiateRequest)).getUploadId(); + return SocketAccess.doPrivileged(() -> client.initiateMultipartUpload(initiateRequest)).getUploadId(); + } + private PartETag uploadPart(BytesReference updated, String uploadId) throws IOException { final var uploadPartRequest = new UploadPartRequest(); uploadPartRequest.setBucketName(bucket); uploadPartRequest.setKey(blobKey); @@ -683,83 +707,81 @@ void run(BytesReference expected, BytesReference updated, ActionListener client.uploadPart(uploadPartRequest)).getPartETag(); - - final var currentUploads = listMultipartUploads(); - final var uploadIndex = getUploadIndex(uploadId, currentUploads); + return SocketAccess.doPrivileged(() -> client.uploadPart(uploadPartRequest)).getPartETag(); + } - if (uploadIndex < 0) { - // already aborted by someone else - listener.onResponse(OptionalBytesReference.MISSING); - return; + private int getUploadIndex(String targetUploadId, List multipartUploads) { + var uploadIndex = 0; + var found = false; + for (MultipartUpload multipartUpload : multipartUploads) { + final var observedUploadId = multipartUpload.getUploadId(); + if (observedUploadId.equals(targetUploadId)) { + final var currentTimeMillis = blobStore.getThreadPool().absoluteTimeInMillis(); + final var ageMillis = currentTimeMillis - multipartUpload.getInitiated().toInstant().toEpochMilli(); + final var expectedAgeRangeMillis = blobStore.getCompareAndExchangeTimeToLive().millis(); + if (ageMillis < -expectedAgeRangeMillis || ageMillis > expectedAgeRangeMillis) { + logger.warn( + """ + compare-and-exchange of blob [{}:{}] was initiated at [{}={}] \ + which deviates from local node epoch time [{}] by more than the warn threshold of [{}ms]""", + 
bucket, + blobKey, + multipartUpload.getInitiated(), + multipartUpload.getInitiated().toInstant().toEpochMilli(), + currentTimeMillis, + expectedAgeRangeMillis + ); + } + found = true; + } else if (observedUploadId.compareTo(targetUploadId) < 0) { + uploadIndex += 1; + } } - final var isComplete = new AtomicBoolean(); - final Runnable doCleanup = () -> { - if (isComplete.compareAndSet(false, true)) { - safeAbortMultipartUpload(uploadId); - } - }; + return found ? uploadIndex : -1; + } - try ( - var listeners = new RefCountingListener( - ActionListener.runAfter( - listener.delegateFailure( - (delegate1, ignored) -> getRegister( - purpose, - rawKey, - delegate1.delegateFailure((delegate2, currentValue) -> ActionListener.completeWith(delegate2, () -> { - if (currentValue.isPresent() && currentValue.bytesReference().equals(expected)) { - final var completeMultipartUploadRequest = new CompleteMultipartUploadRequest( - bucket, - blobKey, - uploadId, - List.of(partETag) - ); - completeMultipartUploadRequest.setRequestMetricCollector( - blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose) - ); - SocketAccess.doPrivilegedVoid(() -> client.completeMultipartUpload(completeMultipartUploadRequest)); - isComplete.set(true); - } - return currentValue; - })) - ) - ), - doCleanup - ) - ) - ) { - if (currentUploads.size() > 1) { - // This is a small optimization to improve the liveness properties of this algorithm. - // - // When there are multiple competing updates, we order them by upload id and the first one tries to cancel the competing - // updates in order to make progress. To avoid liveness issues when the winner fails, the rest wait based on their - // upload_id-based position and try to make progress. - - var delayListener = listeners.acquire(); - final Runnable cancelConcurrentUpdates = () -> { - try { - for (MultipartUpload currentUpload : currentUploads) { - final var currentUploadId = currentUpload.getUploadId(); - if (uploadId.equals(currentUploadId) == false) { - blobStore.getSnapshotExecutor() - .execute(ActionRunnable.run(listeners.acquire(), () -> safeAbortMultipartUpload(currentUploadId))); - } - } - } finally { - delayListener.onResponse(null); - } - }; + private void ensureOtherUploadsComplete( + String uploadId, + int uploadIndex, + List currentUploads, + ActionListener listener + ) { + // This is a small optimization to improve the liveness properties of this algorithm. + // + // When there are updates racing to complete, we try and let them complete in order of their upload IDs. The one with the first + // upload ID immediately tries to cancel the competing updates in order to make progress, but the ones with greater upload IDs + // wait based on their position in the list before proceeding. + // + // Note that this does not guarantee that any of the uploads actually succeeds. Another operation could start and see a + // different collection of racing uploads and cancel all of them while they're sleeping. In theory this whole thing is provably + // impossible anyway [1] but in practice it'll eventually work with sufficient retries. + // + // [1] Michael J. Fischer, Nancy A. Lynch, and Michael S. Paterson. 1985. Impossibility of distributed consensus with one faulty + // process. J. ACM 32, 2 (April 1985), 374–382. + // + // TODO should we sort these by initiation time (and then upload ID as a tiebreaker)? + // TODO should we listMultipartUploads() while waiting, so we can fail quicker if we are concurrently cancelled? 
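The staggered wait described in the comment above is easy to see in isolation. A minimal sketch of the back-off computation, assuming a hypothetical AntiContentionBackoff helper; only the shape of the calculation (position times the anti-contention delay, plus up to 50ms of jitter, mirroring the Randomness.get().nextInt(50) term used below) is taken from this patch:

    import java.util.Random;

    final class AntiContentionBackoff {
        private static final Random RANDOM = new Random();

        // uploadIndex: our position among the racing uploads, ordered by upload ID.
        // antiContentionDelayMillis: the repository_s3.compare_and_exchange.anti_contention_delay setting.
        static long computeBackoffMillis(int uploadIndex, long antiContentionDelayMillis) {
            // The first upload in the ordering proceeds immediately; later ones wait in
            // proportion to their position, with a little random jitter to break ties.
            return uploadIndex == 0 ? 0L : uploadIndex * antiContentionDelayMillis + RANDOM.nextInt(50);
        }
    }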
+ if (uploadIndex > 0) { + threadPool.scheduleUnlessShuttingDown( + TimeValue.timeValueMillis( + uploadIndex * blobStore.getCompareAndExchangeAntiContentionDelay().millis() + Randomness.get().nextInt(50) + ), + blobStore.getSnapshotExecutor(), + ActionRunnable.wrap(listener, l -> cancelOtherUploads(uploadId, currentUploads, l)) + ); + } else { + cancelOtherUploads(uploadId, currentUploads, listener); + } + } - if (uploadIndex > 0) { - threadPool.scheduleUnlessShuttingDown( - TimeValue.timeValueMillis(TimeValue.timeValueSeconds(uploadIndex).millis() + Randomness.get().nextInt(50)), - blobStore.getSnapshotExecutor(), - cancelConcurrentUpdates - ); - } else { - cancelConcurrentUpdates.run(); + private void cancelOtherUploads(String uploadId, List currentUploads, ActionListener listener) { + final var executor = blobStore.getSnapshotExecutor(); + try (var listeners = new RefCountingListener(listener)) { + for (final var currentUpload : currentUploads) { + final var currentUploadId = currentUpload.getUploadId(); + if (uploadId.equals(currentUploadId) == false) { + executor.execute(ActionRunnable.run(listeners.acquire(), () -> abortMultipartUploadIfExists(currentUploadId))); } } } @@ -787,6 +809,11 @@ private void abortMultipartUploadIfExists(String uploadId) { } } + private void completeMultipartUpload(String uploadId, PartETag partETag) { + final var completeMultipartUploadRequest = new CompleteMultipartUploadRequest(bucket, blobKey, uploadId, List.of(partETag)); + completeMultipartUploadRequest.setRequestMetricCollector(blobStore.getMetricCollector(Operation.PUT_MULTIPART_OBJECT, purpose)); + SocketAccess.doPrivilegedVoid(() -> client.completeMultipartUpload(completeMultipartUploadRequest)); + } } @Override diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 1371e51017bee..25a2c4d8e1613 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -32,7 +32,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.TimeValue; import org.elasticsearch.telemetry.metric.LongCounter; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -80,7 +80,7 @@ class S3BlobStore implements BlobStore { private final ThreadPool threadPool; private final Executor snapshotExecutor; - private final Meter meter; + private final MeterRegistry meterRegistry; private final LongCounter requestCounter; private final StatsCollectors statsCollectors = new StatsCollectors(); @@ -99,7 +99,7 @@ class S3BlobStore implements BlobStore { RepositoryMetadata repositoryMetadata, BigArrays bigArrays, ThreadPool threadPool, - Meter meter + MeterRegistry meterRegistry ) { this.service = service; this.bigArrays = bigArrays; @@ -111,8 +111,8 @@ class S3BlobStore implements BlobStore { this.repositoryMetadata = repositoryMetadata; this.threadPool = threadPool; this.snapshotExecutor = threadPool.executor(ThreadPool.Names.SNAPSHOT); - this.meter = meter; - this.requestCounter = this.meter.getLongCounter(METRIC_REQUESTS_COUNT); + this.meterRegistry = meterRegistry; + this.requestCounter = this.meterRegistry.getLongCounter(METRIC_REQUESTS_COUNT); s3RequestRetryStats = new S3RequestRetryStats(getMaxRetries()); 
threadPool.scheduleWithFixedDelay(() -> { var priorRetryStats = s3RequestRetryStats; @@ -140,6 +140,10 @@ public TimeValue getCompareAndExchangeTimeToLive() { return service.compareAndExchangeTimeToLive; } + public TimeValue getCompareAndExchangeAntiContentionDelay() { + return service.compareAndExchangeAntiContentionDelay; + } + // metrics collector that ignores null responses that we interpret as the request not reaching the S3 endpoint due to a network // issue class IgnoreNoResponseMetricsCollector extends RequestMetricCollector { diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index 02fe197077530..ddab811fcb078 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.repositories.FinalizeSnapshotContext; @@ -36,7 +36,7 @@ import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotsService; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -153,12 +153,12 @@ class S3Repository extends MeteredBlobStoreRepository { /** * Artificial delay to introduce after a snapshot finalization or delete has finished so long as the repository is still using the * backwards compatible snapshot format from before - * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link IndexVersion#V_7_6_0}). + * {@link org.elasticsearch.snapshots.SnapshotsService#SHARD_GEN_IN_REPO_DATA_VERSION} ({@link IndexVersions#V_7_6_0}). * This delay is necessary so that the eventually consistent nature of AWS S3 does not randomly result in repository corruption when * doing repository operations in rapid succession on a repository in the old metadata format. * This setting should not be adjusted in production when working with an AWS S3 backed repository. Doing so risks the repository * becoming silently corrupted. To get rid of this waiting period, either create a new S3 repository or remove all snapshots older than - * {@link IndexVersion#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new + * {@link IndexVersions#V_7_6_0} from the repository which will trigger an upgrade of the repository metadata to the new * format and disable the cooldown period. 
*/ static final Setting COOLDOWN_PERIOD = Setting.timeSetting( @@ -205,7 +205,7 @@ class S3Repository extends MeteredBlobStoreRepository { final ClusterService clusterService, final BigArrays bigArrays, final RecoverySettings recoverySettings, - final Meter meter + final MeterRegistry meterRegistry ) { super( metadata, @@ -215,7 +215,7 @@ class S3Repository extends MeteredBlobStoreRepository { recoverySettings, buildBasePath(metadata), buildLocation(metadata), - meter + meterRegistry ); this.service = service; this.snapshotExecutor = threadPool().executor(ThreadPool.Names.SNAPSHOT); @@ -408,7 +408,7 @@ protected S3BlobStore createBlobStore() { metadata, bigArrays, threadPool, - meter + meterRegistry ); } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 4a8d4ab6bab18..97c065e771ffd 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -12,29 +12,18 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.RepositoryMetadata; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.telemetry.metric.Meter; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; @@ -45,7 +34,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Supplier; /** * A plugin to add a repository type that writes to and from the AWS S3. 
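To make the Meter to MeterRegistry rename concrete: S3BlobStore looks its request counter up once by name and then increments it with a map of attributes. A minimal sketch, assuming a hypothetical RequestMetrics wrapper; getLongCounter and incrementBy are the calls used in this patch, the attribute keys (repo_type, repo_name, operation, purpose) are the ones TestS3BlobTelemetryPlugin asserts on, and the attribute values here are illustrative placeholders:

    import java.util.Map;
    import org.elasticsearch.telemetry.metric.LongCounter;
    import org.elasticsearch.telemetry.metric.MeterRegistry;

    final class RequestMetrics {
        private final LongCounter requestCounter;

        // In this patch the metric name is the S3BlobStore.METRIC_REQUESTS_COUNT constant.
        RequestMetrics(MeterRegistry meterRegistry, String metricName) {
            this.requestCounter = meterRegistry.getLongCounter(metricName);
        }

        void onRequests(long count, String repoName, String operation, String purpose) {
            // One increment per batch of requests, tagged so a collector can aggregate
            // per repository, per operation and per purpose.
            requestCounter.incrementBy(count, Map.of("repo_type", "s3", "repo_name", repoName, "operation", operation, "purpose", purpose));
        }
    }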
@@ -69,7 +57,7 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo } private final SetOnce<S3Service> service = new SetOnce<>(); - private final SetOnce<Meter> meter = new SetOnce<>(); + private final SetOnce<MeterRegistry> meterRegistry = new SetOnce<>(); private final Settings settings; public S3RepositoryPlugin(Settings settings) { @@ -88,29 +76,14 @@ protected S3Repository createRepository( final BigArrays bigArrays, final RecoverySettings recoverySettings ) { - return new S3Repository(metadata, registry, service.get(), clusterService, bigArrays, recoverySettings, meter.get()); + return new S3Repository(metadata, registry, service.get(), clusterService, bigArrays, recoverySettings, meterRegistry.get()); } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - service.set(s3Service(environment, clusterService.getSettings())); + public Collection<?> createComponents(PluginServices services) { + service.set(s3Service(services.environment(), services.clusterService().getSettings())); this.service.get().refreshAndClearCache(S3ClientSettings.load(settings)); - meter.set(telemetryProvider.getMeter()); + meterRegistry.set(services.telemetryProvider().getMeterRegistry()); return List.of(service); } @@ -151,6 +124,8 @@ public List<Setting<?>> getSettings() { S3ClientSettings.USE_PATH_STYLE_ACCESS, S3ClientSettings.SIGNER_OVERRIDE, S3ClientSettings.REGION, + S3Service.REPOSITORY_S3_CAS_TTL_SETTING, + S3Service.REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING, S3Repository.ACCESS_KEY_SETTING, S3Repository.SECRET_KEY_SETTING ); @@ -168,7 +143,7 @@ public void close() throws IOException { getService().close(); } - protected Meter getMeter() { - return meter.get(); + protected MeterRegistry getMeterRegistry() { + return meterRegistry.get(); } } diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java index ddacb24be7118..291cf84019cd1 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Service.java @@ -54,12 +54,20 @@ class S3Service implements Closeable { private static final Logger LOGGER = LogManager.getLogger(S3Service.class); - private static final Setting<TimeValue> REPOSITORY_S3_CAS_TTL_SETTING = Setting.timeSetting( + static final Setting<TimeValue> REPOSITORY_S3_CAS_TTL_SETTING = Setting.timeSetting( "repository_s3.compare_and_exchange.time_to_live", StoreHeartbeatService.HEARTBEAT_FREQUENCY, Setting.Property.NodeScope ); + static final Setting<TimeValue> REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING = Setting.timeSetting( + "repository_s3.compare_and_exchange.anti_contention_delay", + TimeValue.timeValueSeconds(1), + TimeValue.timeValueMillis(1), + TimeValue.timeValueHours(24), + Setting.Property.NodeScope + ); + private volatile Map<S3ClientSettings, AmazonS3Reference> clientsCache = emptyMap(); /** @@ -79,6 +87,7 @@ class S3Service implements Closeable { final CustomWebIdentityTokenCredentialsProvider
webIdentityTokenCredentialsProvider; final TimeValue compareAndExchangeTimeToLive; + final TimeValue compareAndExchangeAntiContentionDelay; S3Service(Environment environment, Settings nodeSettings) { webIdentityTokenCredentialsProvider = new CustomWebIdentityTokenCredentialsProvider( @@ -88,6 +97,7 @@ class S3Service implements Closeable { Clock.systemUTC() ); compareAndExchangeTimeToLive = REPOSITORY_S3_CAS_TTL_SETTING.get(nodeSettings); + compareAndExchangeAntiContentionDelay = REPOSITORY_S3_CAS_ANTI_CONTENTION_DELAY_SETTING.get(nodeSettings); } /** diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 2cdcc111b01a6..a587f0c731497 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -31,7 +31,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.admin.cluster.RestGetRepositoriesAction; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -264,7 +264,7 @@ protected S3Repository createRepository( BigArrays bigArrays, RecoverySettings recoverySettings ) { - return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, Meter.NOOP) { + return new S3Repository(metadata, registry, getService(), clusterService, bigArrays, recoverySettings, MeterRegistry.NOOP) { @Override protected void assertSnapshotOrGenericThread() { // eliminate thread name check as we create repo manually on test/main threads diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index a48fd2474bc59..3875181f98ece 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -36,7 +36,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.hamcrest.Matcher; import org.junit.After; import org.junit.Before; @@ -158,7 +158,7 @@ protected BlobContainer createBlobContainer( repositoryMetadata, BigArrays.NON_RECYCLING_INSTANCE, new DeterministicTaskQueue().getThreadPool(), - Meter.NOOP + MeterRegistry.NOOP ) ) { @Override diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index c38c8b764af41..db477c16a57e7 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -20,7 +20,7 @@ import 
org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreTestUtil; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.hamcrest.Matchers; @@ -129,7 +129,7 @@ private S3Repository createS3Repo(RepositoryMetadata metadata) { BlobStoreTestUtil.mockClusterService(), MockBigArrays.NON_RECYCLING_INSTANCE, new RecoverySettings(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), - Meter.NOOP + MeterRegistry.NOOP ) { @Override protected void assertSnapshotOrGenericThread() { diff --git a/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java b/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java index 8057684375d69..fe33051df342e 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/plugin/repository/url/URLRepositoryPlugin.java @@ -9,28 +9,17 @@ package org.elasticsearch.plugin.repository.url; import org.apache.lucene.util.SetOnce; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.blobstore.url.http.URLHttpClient; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.core.IOUtils; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.url.URLRepository; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import java.io.IOException; @@ -39,7 +28,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.function.Supplier; public class URLRepositoryPlugin extends Plugin implements RepositoryPlugin { private final SetOnce httpClientFactory = new SetOnce<>(); @@ -76,22 +64,7 @@ public Map getRepositories( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public 
Collection createComponents(PluginServices services) { final URLHttpClient.Factory apacheURLHttpClientFactory = new URLHttpClient.Factory(); diff --git a/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java b/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java index dc61abf33e286..e715638ee52bb 100644 --- a/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java +++ b/modules/rest-root/src/test/java/org/elasticsearch/rest/root/MainResponseTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -67,7 +68,7 @@ public void testToXContent() throws IOException { build.isSnapshot(), indexVersion.luceneVersion().toString(), build.minWireCompatVersion(), - Build.minimumCompatString(IndexVersion.MINIMUM_COMPATIBLE) + Build.minimumCompatString(IndexVersions.MINIMUM_COMPATIBLE) ) ), Strings.toString(builder) diff --git a/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/RuntimeFieldsCommonPlugin.java b/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/RuntimeFieldsCommonPlugin.java index 6cad1e057ef74..b0adda3b2062b 100644 --- a/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/RuntimeFieldsCommonPlugin.java +++ b/modules/runtime-fields-common/src/main/java/org/elasticsearch/runtimefields/RuntimeFieldsCommonPlugin.java @@ -8,28 +8,13 @@ package org.elasticsearch.runtimefields; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.List; -import java.util.function.Supplier; /** * The plugin class for all the runtime fields common functionality that requires large dependencies. 
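The same mechanical migration recurs throughout this diff: the long positional createComponents signature (fourteen parameters in the removed overloads) collapses into a single PluginServices argument from which a plugin pulls only the services it needs. A minimal sketch of the new shape, assuming a hypothetical ExamplePlugin; the accessors shown (threadPool() and telemetryProvider()) are the ones exercised elsewhere in this patch:

    import java.util.Collection;
    import java.util.List;
    import org.elasticsearch.plugins.Plugin;

    public class ExamplePlugin extends Plugin {
        @Override
        public Collection<?> createComponents(PluginServices services) {
            // Ask the services holder for exactly what this plugin needs instead of
            // accepting (and mostly ignoring) a long positional parameter list.
            var threadPool = services.threadPool();
            var meterRegistry = services.telemetryProvider().getMeterRegistry();
            // ... wire up and return any components built from these services ...
            return List.of();
        }
    }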
@@ -64,23 +49,8 @@ public List<Setting<?>> getSettings() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - grokHelper.finishInitializing(threadPool); + public Collection<?> createComponents(PluginServices services) { + grokHelper.finishInitializing(services.threadPool()); return List.of(); } diff --git a/modules/systemd/src/main/java/org/elasticsearch/systemd/SystemdPlugin.java b/modules/systemd/src/main/java/org/elasticsearch/systemd/SystemdPlugin.java index f425de279129b..e3dca57472ade 100644 --- a/modules/systemd/src/main/java/org/elasticsearch/systemd/SystemdPlugin.java +++ b/modules/systemd/src/main/java/org/elasticsearch/systemd/SystemdPlugin.java @@ -12,29 +12,14 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Build; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.List; -import java.util.function.Supplier; public class SystemdPlugin extends Plugin implements ClusterPlugin { @@ -80,22 +65,7 @@ Scheduler.Cancellable extender() { } @Override - public Collection<Object> createComponents( - final Client client, - final ClusterService clusterService, - final ThreadPool threadPool, - final ResourceWatcherService resourceWatcherService, - final ScriptService scriptService, - final NamedXContentRegistry xContentRegistry, - final Environment environment, - final NodeEnvironment nodeEnvironment, - final NamedWriteableRegistry namedWriteableRegistry, - final IndexNameExpressionResolver expressionResolver, - final Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { if (enabled == false) { extender.set(null); return List.of(); } @@ -107,7 +77,7 @@ public Collection<Object> createComponents( * Therefore, every fifteen seconds we send systemd a message via sd_notify to extend the timeout by thirty seconds.
We will cancel * this scheduled task after we successfully notify systemd that we are ready. */ - extender.set(threadPool.scheduleWithFixedDelay(() -> { + extender.set(services.threadPool().scheduleWithFixedDelay(() -> { final int rc = sd_notify(0, "EXTEND_TIMEOUT_USEC=30000000"); if (rc < 0) { logger.warn("extending startup timeout via sd_notify failed with [{}]", rc); diff --git a/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java b/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java index b2bfbdb976eb9..ed5fdb79c4ba6 100644 --- a/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java +++ b/modules/systemd/src/test/java/org/elasticsearch/systemd/SystemdPluginTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.OptionalMatchers; import org.elasticsearch.threadpool.Scheduler; @@ -58,30 +58,36 @@ public class SystemdPluginTests extends ESTestCase { ).thenReturn(extender); } + private void startPlugin(SystemdPlugin plugin) { + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.threadPool()).thenReturn(threadPool); + plugin.createComponents(services); + } + public void testIsEnabled() { final SystemdPlugin plugin = new SystemdPlugin(false, randomPackageBuildType, Boolean.TRUE.toString()); - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + startPlugin(plugin); assertTrue(plugin.isEnabled()); assertNotNull(plugin.extender()); } public void testIsNotPackageDistribution() { final SystemdPlugin plugin = new SystemdPlugin(false, randomNonPackageBuildType, Boolean.TRUE.toString()); - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + startPlugin(plugin); assertFalse(plugin.isEnabled()); assertNull(plugin.extender()); } public void testIsImplicitlyNotEnabled() { final SystemdPlugin plugin = new SystemdPlugin(false, randomPackageBuildType, null); - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + startPlugin(plugin); assertFalse(plugin.isEnabled()); assertNull(plugin.extender()); } public void testIsExplicitlyNotEnabled() { final SystemdPlugin plugin = new SystemdPlugin(false, randomPackageBuildType, Boolean.FALSE.toString()); - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + startPlugin(plugin); assertFalse(plugin.isEnabled()); assertNull(plugin.extender()); } @@ -169,7 +175,7 @@ int sd_notify(final int unset_environment, final String state) { } }; - plugin.createComponents(null, null, threadPool, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null); + startPlugin(plugin); if (Boolean.TRUE.toString().equals(esSDNotify)) { assertNotNull(plugin.extender()); } else { diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java index 
d1c4cb5a6e4c0..348e9f5fae7c8 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/plugin/analysis/phonetic/AnalysisPhoneticFactoryTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -42,7 +43,7 @@ public void testDisallowedWithSynonyms() throws IOException { Settings settings = Settings.builder() .put( IndexMetadata.SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersion.current()) ) .put("path.home", createTempDir().toString()) .build(); diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index b21f6224c9fc2..5107bb9051bd1 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -95,9 +95,9 @@ tasks.named("test").configure { // this is needed to manipulate com.amazonaws.sdk.ec2MetadataServiceEndpointOverride system property // it is better rather disable security manager at all with `systemProperty 'tests.security.manager', 'false'` if (BuildParams.inFipsJvm){ - systemProperty 'java.security.policy', "=file://${buildDir}/tmp/java.policy" + nonInputProperties.systemProperty 'java.security.policy', "=file://${buildDir}/tmp/java.policy" } else { - systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" + nonInputProperties.systemProperty 'java.security.policy', "file://${buildDir}/tmp/java.policy" } } diff --git a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java index ead12a7b2246f..e92c7ca4bdebb 100644 --- a/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java +++ b/plugins/mapper-size/src/internalClusterTest/java/org/elasticsearch/index/mapper/size/SizeMappingIT.java @@ -110,15 +110,15 @@ public void testGetWithFields() throws Exception { assertAcked(prepareCreate("test").setMapping("_size", "enabled=true")); final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}"; indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get(); + SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get(); assertEquals(source.length(), ((Long) searchResponse.getHits().getHits()[0].getFields().get("_size").getValue()).intValue()); // this should not work when requesting fields via wildcard expression - searchResponse = client().prepareSearch("test").addFetchField("*").get(); + searchResponse = prepareSearch("test").addFetchField("*").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); // This should STILL work - searchResponse = client().prepareSearch("test").addStoredField("*").get(); + searchResponse = prepareSearch("test").addStoredField("*").get(); 
assertNotNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); } @@ -126,13 +126,13 @@ public void testWildCardWithFieldsWhenDisabled() throws Exception { assertAcked(prepareCreate("test").setMapping("_size", "enabled=false")); final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}"; indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get(); + SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); - searchResponse = client().prepareSearch("test").addFetchField("*").get(); + searchResponse = prepareSearch("test").addFetchField("*").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); - searchResponse = client().prepareSearch("test").addStoredField("*").get(); + searchResponse = prepareSearch("test").addStoredField("*").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); } @@ -140,13 +140,13 @@ public void testWildCardWithFieldsWhenNotProvided() throws Exception { assertAcked(prepareCreate("test")); final String source = "{\"f\":\"" + randomAlphaOfLengthBetween(1, 100) + "\"}"; indexRandom(true, client().prepareIndex("test").setId("1").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("test").addFetchField("_size").get(); + SearchResponse searchResponse = prepareSearch("test").addFetchField("_size").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); - searchResponse = client().prepareSearch("test").addFetchField("*").get(); + searchResponse = prepareSearch("test").addFetchField("*").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); - searchResponse = client().prepareSearch("test").addStoredField("*").get(); + searchResponse = prepareSearch("test").addStoredField("*").get(); assertNull(searchResponse.getHits().getHits()[0].getFields().get("_size")); } } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 78809a170fbbd..5db01ed636995 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -281,7 +281,7 @@ tasks.withType(RestIntegTestTask).configureEach { testTask -> if (disabledIntegTestTaskNames.contains(name) == false) { nonInputProperties.systemProperty "test.krb5.principal.es", "elasticsearch@${realm}" nonInputProperties.systemProperty "test.krb5.principal.hdfs", "hdfs/hdfs.build.elastic.co@${realm}" - jvmArgs "-Djava.security.krb5.conf=${project.configurations.krb5Config.getSingleFile().getPath()}" + nonInputProperties.systemProperty "java.security.krb5.conf", "${project.configurations.krb5Config.getSingleFile().getPath()}" nonInputProperties.systemProperty( "test.krb5.keytab.hdfs", new File(project.configurations.krb5Keytabs.singleFile, "hdfs_hdfs.build.elastic.co.keytab").getPath() @@ -291,7 +291,7 @@ tasks.withType(RestIntegTestTask).configureEach { testTask -> testClusters.matching { it.name == testTask.name }.configureEach { if (testTask.name.contains("Secure")) { - systemProperty "java.security.krb5.conf", configurations.krb5Config.singleFile.getPath() + systemProperty "java.security.krb5.conf", { configurations.krb5Config.singleFile.getPath() }, IGNORE_VALUE extraConfigFile( "repository-hdfs/krb5.keytab", new 
File(project.configurations.krb5Keytabs.singleFile, "elasticsearch.keytab"), diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java index b3d391ce13cb4..cd38cc04e6b31 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsRepositoryTests.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.elasticsearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.Settings; @@ -19,6 +18,7 @@ import java.util.Collection; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -37,18 +37,18 @@ protected SecureSettings credentials() { @Override protected void createRepository(String repoName) { - AcknowledgedResponse putRepositoryResponse = clusterAdmin().preparePutRepository(repoName) - .setType("hdfs") - .setSettings( - Settings.builder() - .put("uri", "hdfs:///") - .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) - .put("path", "foo") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean()) - ) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + assertAcked( + clusterAdmin().preparePutRepository(repoName) + .setType("hdfs") + .setSettings( + Settings.builder() + .put("uri", "hdfs:///") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) + .put("path", "foo") + .put("chunk_size", randomIntBetween(100, 1000) + "k") + .put("compress", randomBoolean()) + ) + ); } // HDFS repository doesn't have precise cleanup stats so we only check whether or not any blobs were removed diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java index f72a5eeea90d0..b76d2e27be66a 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HdfsTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; @@ -25,6 +24,7 @@ import java.util.Collection; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -39,20 +39,20 @@ protected Collection<Class<? extends Plugin>> getPlugins() { public void testSimpleWorkflow() { Client client = client(); - AcknowledgedResponse putRepositoryResponse = client.admin() - .cluster() - .preparePutRepository("test-repo") - .setType("hdfs") - .setSettings( -
Settings.builder() - .put("uri", "hdfs:///") - .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) - .put("path", "foo") - .put("chunk_size", randomIntBetween(100, 1000) + "k") - .put("compress", randomBoolean()) - ) - .get(); - assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true)); + assertAcked( + client.admin() + .cluster() + .preparePutRepository("test-repo") + .setType("hdfs") + .setSettings( + Settings.builder() + .put("uri", "hdfs:///") + .put("conf.fs.AbstractFileSystem.hdfs.impl", TestingFs.class.getName()) + .put("path", "foo") + .put("chunk_size", randomIntBetween(100, 1000) + "k") + .put("compress", randomBoolean()) + ) + ); createIndex("test-idx-1"); createIndex("test-idx-2"); diff --git a/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java b/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java index d4763af7a505a..4d1f6426821c4 100644 --- a/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java +++ b/plugins/store-smb/src/internalClusterTest/java/org/elasticsearch/index/store/smb/AbstractAzureFsTestCase.java @@ -32,7 +32,7 @@ public void testAzureFs() { indexDoc("test", "" + i, "foo", "bar"); } refresh(); - SearchResponse response = client().prepareSearch("test").get(); + SearchResponse response = prepareSearch("test").get(); assertThat(response.getHits().getTotalHits().value, is(nbDocs)); } } diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 75c3d8d77dd72..6af9bc9b11723 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -882,19 +882,12 @@ public void testEmptyShard() throws IOException { } /** - * Tests recovery of an index with or without a translog and the - * statistics we gather about that. + * Tests recovery of an index. */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/52031") public void testRecovery() throws Exception { int count; - boolean shouldHaveTranslog; if (isRunningAgainstOldCluster()) { count = between(200, 300); - /* We've had bugs in the past where we couldn't restore - * an index without a translog so we randomize whether - * or not we have one. 
*/ - shouldHaveTranslog = randomBoolean(); Settings.Builder settings = Settings.builder(); if (minimumNodeVersion().before(Version.V_8_0_0) && randomBoolean()) { settings.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), randomBoolean()); @@ -911,21 +904,8 @@ public void testRecovery() throws Exception { flushRequest.addParameter("force", "true"); flushRequest.addParameter("wait_if_ongoing", "true"); assertOK(client().performRequest(flushRequest)); - - if (shouldHaveTranslog) { - // Update a few documents so we are sure to have a translog - indexRandomDocuments( - count / 10, - false, // flushing here would invalidate the whole thing - false, - true, - i -> jsonBuilder().startObject().field("field", "value").endObject() - ); - } - saveInfoDocument(index + "_should_have_translog", Boolean.toString(shouldHaveTranslog)); } else { count = countOfIndexedRandomDocuments(); - shouldHaveTranslog = Booleans.parseBoolean(loadInfoDocument(index + "_should_have_translog")); } // Count the documents in the index to make sure we have as many as we put there @@ -936,72 +916,13 @@ public void testRecovery() throws Exception { assertTotalHits(count, countResponse); if (false == isRunningAgainstOldCluster()) { - boolean restoredFromTranslog = false; boolean foundPrimary = false; Request recoveryRequest = new Request("GET", "/_cat/recovery/" + index); recoveryRequest.addParameter("h", "index,shard,type,stage,translog_ops_recovered"); recoveryRequest.addParameter("s", "index,shard,type"); String recoveryResponse = toStr(client().performRequest(recoveryRequest)); - for (String line : recoveryResponse.split("\n")) { - // Find the primaries - foundPrimary = true; - if (false == line.contains("done") && line.contains("existing_store")) { - continue; - } - /* Mark if we see a primary that looked like it restored from the translog. - * Not all primaries will look like this all the time because we modify - * random documents when we want there to be a translog and they might - * not be spread around all the shards. 
*/ - Matcher m = Pattern.compile("(\\d+)$").matcher(line); - assertTrue(line, m.find()); - int translogOps = Integer.parseInt(m.group(1)); - if (translogOps > 0) { - restoredFromTranslog = true; - } - } + foundPrimary = recoveryResponse.split("\n").length > 0; assertTrue("expected to find a primary but didn't\n" + recoveryResponse, foundPrimary); - assertEquals("mismatch while checking for translog recovery\n" + recoveryResponse, shouldHaveTranslog, restoredFromTranslog); - - var luceneVersion = IndexVersion.current().luceneVersion(); - String currentLuceneVersion = luceneVersion.toString(); - int currentLuceneVersionMajor = luceneVersion.major; - String bwcLuceneVersion = getOldClusterIndexVersion().luceneVersion().toString(); - if (shouldHaveTranslog && false == currentLuceneVersion.equals(bwcLuceneVersion)) { - int numCurrentVersion = 0; - int numBwcVersion = 0; - Request segmentsRequest = new Request("GET", "/_cat/segments/" + index); - segmentsRequest.addParameter("h", "prirep,shard,index,version"); - segmentsRequest.addParameter("s", "prirep,shard,index"); - String segmentsResponse = toStr(client().performRequest(segmentsRequest)); - for (String line : segmentsResponse.split("\n")) { - if (false == line.startsWith("p")) { - continue; - } - Matcher m = Pattern.compile("((\\d+)\\.\\d+\\.\\d+)$").matcher(line); - assertTrue(line, m.find()); - String version = m.group(1); - int major = Integer.parseInt(m.group(2)); - if (currentLuceneVersion.equals(version)) { - numCurrentVersion++; - } else if (bwcLuceneVersion.equals(version)) { - numBwcVersion++; - } else if (major == currentLuceneVersionMajor - 1) { - // we can read one lucene version back. The upgrade path might have created old segment versions. - // that's ok, we just ignore them - continue; - } else { - fail( - "expected lucene version to be one of [" + currentLuceneVersion + "," + bwcLuceneVersion + "] but was " + line - ); - } - } - assertNotEquals( - "expected at least 1 current segment after translog recovery. segments:\n" + segmentsResponse, - 0, - numCurrentVersion - ); - assertNotEquals("expected at least 1 old segment. 
segments:\n" + segmentsResponse, 0, numBwcVersion); - } } } diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f3edc7a90646a..e3796683d1d32 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -84,6 +84,8 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> } } systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" + systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') + systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/HotThreadsIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/HotThreadsIT.java index 9c931e15eeee3..04c59e1ce9214 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/HotThreadsIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/HotThreadsIT.java @@ -17,8 +17,10 @@ public class HotThreadsIT extends ESRestTestCase { + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); + public void testHotThreads() throws Exception { - final IndexingIT.Nodes nodes = IndexingIT.buildNodeAndVersions(client()); + final MixedClusterTestNodes nodes = MixedClusterTestNodes.buildNodes(client(), BWC_NODES_VERSION); assumeFalse("no new node found", nodes.getNewNodes().isEmpty()); assumeFalse("no bwc node found", nodes.getBWCNodes().isEmpty()); assumeTrue( diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java index b2370c6e564ef..aac4b6a020d4b 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.backwards; import org.apache.http.HttpHost; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -29,9 +28,10 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.regex.Pattern; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; @@ -39,6 +39,7 @@ import static org.hamcrest.Matchers.oneOf; public class IndexingIT extends ESRestTestCase { + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); private int indexDocs(String index, final int idStart, final int numDocs) throws IOException { for (int i = 0; i < numDocs; i++) { @@ -76,10 +77,10 @@ private int indexDocWithConcurrentUpdates(String index, final int docId, int nUp } public void testIndexVersionPropagation() throws Exception { - Nodes nodes = buildNodeAndVersions(); + MixedClusterTestNodes nodes = buildNodeAndVersions(); assumeFalse("new nodes is empty", nodes.getNewNodes().isEmpty()); logger.info("cluster discovered: {}", nodes.toString()); - final List bwcNamesList = nodes.getBWCNodes().stream().map(Node::nodeName).collect(Collectors.toList()); + final List bwcNamesList = nodes.getBWCNodes().stream().map(MixedClusterTestNode::nodeName).collect(Collectors.toList()); final String bwcNames = bwcNamesList.stream().collect(Collectors.joining(",")); Settings.Builder settings = 
Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) @@ -92,7 +93,7 @@ public void testIndexVersionPropagation() throws Exception { try ( RestClient newNodeClient = buildClient( restClientSettings(), - nodes.getNewNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getNewNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { @@ -166,10 +167,10 @@ public void testIndexVersionPropagation() throws Exception { } public void testSeqNoCheckpoints() throws Exception { - Nodes nodes = buildNodeAndVersions(); + MixedClusterTestNodes nodes = buildNodeAndVersions(); assumeFalse("new nodes is empty", nodes.getNewNodes().isEmpty()); logger.info("cluster discovered: {}", nodes.toString()); - final List bwcNamesList = nodes.getBWCNodes().stream().map(Node::nodeName).collect(Collectors.toList()); + final List bwcNamesList = nodes.getBWCNodes().stream().map(MixedClusterTestNode::nodeName).collect(Collectors.toList()); final String bwcNames = bwcNamesList.stream().collect(Collectors.joining(",")); Settings.Builder settings = Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) @@ -181,14 +182,14 @@ public void testSeqNoCheckpoints() throws Exception { try ( RestClient newNodeClient = buildClient( restClientSettings(), - nodes.getNewNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getNewNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { int numDocs = 0; final int numberOfInitialDocs = 1 + randomInt(5); logger.info("indexing [{}] docs initially", numberOfInitialDocs); numDocs += indexDocs(index, 0, numberOfInitialDocs); - assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : 0, newNodeClient); + assertSeqNoOnShards(index, nodes, numDocs, newNodeClient); logger.info("allowing shards on all nodes"); updateIndexSettings(index, Settings.builder().putNull("index.routing.allocation.include._name")); ensureGreen(index); @@ -199,7 +200,7 @@ public void testSeqNoCheckpoints() throws Exception { final int numberOfDocsAfterAllowingShardsOnAllNodes = 1 + randomInt(5); logger.info("indexing [{}] docs after allowing shards on all nodes", numberOfDocsAfterAllowingShardsOnAllNodes); numDocs += indexDocs(index, numDocs, numberOfDocsAfterAllowingShardsOnAllNodes); - assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : 0, newNodeClient); + assertSeqNoOnShards(index, nodes, numDocs, newNodeClient); Shard primary = buildShards(index, nodes, newNodeClient).stream().filter(Shard::primary).findFirst().get(); logger.info("moving primary to new node by excluding {}", primary.node().nodeName()); updateIndexSettings(index, Settings.builder().put("index.routing.allocation.exclude._name", primary.node().nodeName())); @@ -209,7 +210,7 @@ public void testSeqNoCheckpoints() throws Exception { logger.info("indexing [{}] docs after moving primary", numberOfDocsAfterMovingPrimary); numDocsOnNewPrimary += indexDocs(index, numDocs, numberOfDocsAfterMovingPrimary); numDocs += numberOfDocsAfterMovingPrimary; - assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : numDocsOnNewPrimary, newNodeClient); + assertSeqNoOnShards(index, nodes, numDocs, newNodeClient); /* * Dropping the number of replicas to zero, and then increasing it to one triggers a recovery thus exercising any BWC-logic in * the recovery code. 
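* A minimal sketch of that replica bounce, assuming only the updateIndexSettings and
* ensureGreen helpers already used in this test: dropping the replica count to zero
* discards the replica copies, and raising it back to one allocates a fresh replica
* whose peer recovery is what exercises the BWC path.
*
*   updateIndexSettings(index, Settings.builder().put("index.number_of_replicas", 0));
*   ensureGreen(index);
*   updateIndexSettings(index, Settings.builder().put("index.number_of_replicas", 1));
*   ensureGreen(index);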
@@ -226,14 +227,14 @@ public void testSeqNoCheckpoints() throws Exception { assertOK(client().performRequest(new Request("POST", index + "/_refresh"))); for (Shard shard : buildShards(index, nodes, newNodeClient)) { - assertCount(index, "_only_nodes:" + shard.node.nodeName, numDocs); + assertCount(index, "_only_nodes:" + shard.node.nodeName(), numDocs); } - assertSeqNoOnShards(index, nodes, nodes.getBWCVersion().major >= 6 ? numDocs : numDocsOnNewPrimary, newNodeClient); + assertSeqNoOnShards(index, nodes, numDocs, newNodeClient); } } public void testUpdateSnapshotStatus() throws Exception { - Nodes nodes = buildNodeAndVersions(); + MixedClusterTestNodes nodes = buildNodeAndVersions(); assumeFalse("new nodes is empty", nodes.getNewNodes().isEmpty()); logger.info("cluster discovered: {}", nodes.toString()); @@ -254,7 +255,7 @@ public void testUpdateSnapshotStatus() throws Exception { assertOK(client().performRequest(request)); - String bwcNames = nodes.getBWCNodes().stream().map(Node::nodeName).collect(Collectors.joining(",")); + String bwcNames = nodes.getBWCNodes().stream().map(MixedClusterTestNode::nodeName).collect(Collectors.joining(",")); // Allocating shards on the BWC nodes to make sure that taking the snapshot happens on those nodes. Settings.Builder settings = Settings.builder() @@ -283,13 +284,40 @@ public void testUpdateSnapshotStatus() throws Exception { request.setJsonEntity("{\"indices\": \"" + index + "\"}"); } + /** + * Tries to extract the major version from a version string, if it is in the major.minor.revision format. + * @param version a string representing a version. Can be opaque or semantic. + * @return Optional.empty() if the format is not recognized, or an Optional containing the major Integer otherwise + */ + private static Optional<Integer> extractLegacyMajorVersion(String version) { + var semanticVersionMatcher = Pattern.compile("^(\\d+)\\.\\d+\\.\\d+\\D?.*").matcher(version); + if (semanticVersionMatcher.matches() == false) { + return Optional.empty(); + } + var major = Integer.parseInt(semanticVersionMatcher.group(1)); + return Optional.of(major); + } + + private static boolean syncedFlushDeprecated() { + // Only versions past 8.10 can be non-semantic, so we can safely assume that non-semantic versions have this "feature" + return extractLegacyMajorVersion(BWC_NODES_VERSION).map(m -> m >= 7).orElse(true); + } + + private static boolean syncedFlushRemoved() { + // Only versions past 8.10 can be non-semantic, so we can safely assume that non-semantic versions have this "feature" + return extractLegacyMajorVersion(BWC_NODES_VERSION).map(m -> m >= 8).orElse(true); + } + public void testSyncedFlushTransition() throws Exception { - Nodes nodes = buildNodeAndVersions(); - assumeTrue("bwc version is on 7.x", nodes.getBWCVersion().before(Version.V_8_0_0)); + MixedClusterTestNodes nodes = buildNodeAndVersions(); + assumeTrue( + "bwc version is on 7.x (synced flush deprecated but not removed yet)", + syncedFlushDeprecated() && syncedFlushRemoved() == false + ); assumeFalse("no new node found", nodes.getNewNodes().isEmpty()); assumeFalse("no bwc node found", nodes.getBWCNodes().isEmpty()); // Allocate shards to new nodes then verify synced flush requests processed by old nodes/new nodes - String newNodes = nodes.getNewNodes().stream().map(Node::nodeName).collect(Collectors.joining(",")); + String newNodes = nodes.getNewNodes().stream().map(MixedClusterTestNode::nodeName).collect(Collectors.joining(",")); int numShards = randomIntBetween(1, 10); int numOfReplicas = randomIntBetween(0,
nodes.getNewNodes().size() - 1); int totalShards = numShards * (numOfReplicas + 1); @@ -307,7 +335,7 @@ public void testSyncedFlushTransition() throws Exception { try ( RestClient oldNodeClient = buildClient( restClientSettings(), - nodes.getBWCNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getBWCNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { Request request = new Request("POST", index + "/_flush/synced"); @@ -335,7 +363,7 @@ public void testSyncedFlushTransition() throws Exception { try ( RestClient newNodeClient = buildClient( restClientSettings(), - nodes.getNewNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getNewNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { Request request = new Request("POST", index + "/_flush/synced"); @@ -364,11 +392,11 @@ public void testSyncedFlushTransition() throws Exception { } public void testFlushTransition() throws Exception { - Nodes nodes = buildNodeAndVersions(); + MixedClusterTestNodes nodes = buildNodeAndVersions(); assumeFalse("no new node found", nodes.getNewNodes().isEmpty()); assumeFalse("no bwc node found", nodes.getBWCNodes().isEmpty()); // Allocate shards to new nodes then verify flush requests processed by old nodes/new nodes - String newNodes = nodes.getNewNodes().stream().map(Node::nodeName).collect(Collectors.joining(",")); + String newNodes = nodes.getNewNodes().stream().map(MixedClusterTestNode::nodeName).collect(Collectors.joining(",")); int numShards = randomIntBetween(1, 10); int numOfReplicas = randomIntBetween(0, nodes.getNewNodes().size() - 1); int totalShards = numShards * (numOfReplicas + 1); @@ -386,7 +414,7 @@ public void testFlushTransition() throws Exception { try ( RestClient oldNodeClient = buildClient( restClientSettings(), - nodes.getBWCNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getBWCNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { Request request = new Request("POST", index + "/_flush"); @@ -403,7 +431,7 @@ public void testFlushTransition() throws Exception { try ( RestClient newNodeClient = buildClient( restClientSettings(), - nodes.getNewNodes().stream().map(Node::publishAddress).toArray(HttpHost[]::new) + nodes.getNewNodes().stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) ) ) { Request request = new Request("POST", index + "/_flush"); @@ -437,7 +465,7 @@ private void assertVersion(final String index, final int docId, final String pre assertThat("version mismatch for doc [" + docId + "] preference [" + preference + "]", actualVersion, equalTo(expectedVersion)); } - private void assertSeqNoOnShards(String index, Nodes nodes, int numDocs, RestClient client) throws Exception { + private void assertSeqNoOnShards(String index, MixedClusterTestNodes nodes, int numDocs, RestClient client) throws Exception { assertBusy(() -> { try { List shards = buildShards(index, nodes, client); @@ -467,7 +495,7 @@ private void assertSeqNoOnShards(String index, Nodes nodes, int numDocs, RestCli }); } - private List buildShards(String index, Nodes nodes, RestClient client) throws IOException { + private List buildShards(String index, MixedClusterTestNodes nodes, RestClient client) throws IOException { Request request = new Request("GET", index + "/_stats"); request.addParameter("level", "shards"); Response response = client.performRequest(request); @@ -476,7 +504,7 @@ private List buildShards(String 
index, Nodes nodes, RestClient client) th for (Object shard : shardStats) { final String nodeId = ObjectPath.evaluate(shard, "routing.node"); final Boolean primary = ObjectPath.evaluate(shard, "routing.primary"); - final Node node = nodes.getSafe(nodeId); + final MixedClusterTestNode node = nodes.getSafe(nodeId); final SeqNoStats seqNoStats; Integer maxSeqNo = ObjectPath.evaluate(shard, "seq_no.max_seq_no"); Integer localCheckpoint = ObjectPath.evaluate(shard, "seq_no.local_checkpoint"); @@ -488,86 +516,9 @@ private List buildShards(String index, Nodes nodes, RestClient client) th return shards; } - private Nodes buildNodeAndVersions() throws IOException { - return buildNodeAndVersions(client()); + private MixedClusterTestNodes buildNodeAndVersions() throws IOException { + return MixedClusterTestNodes.buildNodes(client(), BWC_NODES_VERSION); } - static Nodes buildNodeAndVersions(RestClient client) throws IOException { - Response response = client.performRequest(new Request("GET", "_nodes")); - ObjectPath objectPath = ObjectPath.createFromResponse(response); - Map nodesAsMap = objectPath.evaluate("nodes"); - Nodes nodes = new Nodes(); - for (String id : nodesAsMap.keySet()) { - nodes.add( - new Node( - id, - objectPath.evaluate("nodes." + id + ".name"), - Version.fromString(objectPath.evaluate("nodes." + id + ".version")), - HttpHost.create(objectPath.evaluate("nodes." + id + ".http.publish_address")) - ) - ); - } - response = client.performRequest(new Request("GET", "_cluster/state")); - nodes.setMasterNodeId(ObjectPath.createFromResponse(response).evaluate("master_node")); - return nodes; - } - - static final class Nodes extends HashMap { - - private String masterNodeId = null; - - public Node getMaster() { - return get(masterNodeId); - } - - public void setMasterNodeId(String id) { - if (get(id) == null) { - throw new IllegalArgumentException("node with id [" + id + "] not found. 
got:" + toString()); - } - masterNodeId = id; - } - - public void add(Node node) { - put(node.id(), node); - } - - public List getNewNodes() { - Version bwcVersion = getBWCVersion(); - return values().stream().filter(n -> n.version().after(bwcVersion)).collect(Collectors.toList()); - } - - public List getBWCNodes() { - Version bwcVersion = getBWCVersion(); - return values().stream().filter(n -> n.version().equals(bwcVersion)).collect(Collectors.toList()); - } - - public Version getBWCVersion() { - if (isEmpty()) { - throw new IllegalStateException("no nodes available"); - } - return Version.fromId(values().stream().map(node -> node.version().id).min(Integer::compareTo).get()); - } - - public Node getSafe(String id) { - Node node = get(id); - if (node == null) { - throw new IllegalArgumentException("node with id [" + id + "] not found"); - } - return node; - } - - @Override - public String toString() { - return "Nodes{" - + "masterNodeId='" - + masterNodeId - + "'\n" - + values().stream().map(Node::toString).collect(Collectors.joining("\n")) - + '}'; - } - } - - record Node(String id, String nodeName, Version version, HttpHost publishAddress) {} - - record Shard(Node node, boolean primary, SeqNoStats seqNoStats) {} + private record Shard(MixedClusterTestNode node, boolean primary, SeqNoStats seqNoStats) {} } diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNode.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNode.java new file mode 100644 index 0000000000000..c9cdf9dd41735 --- /dev/null +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNode.java @@ -0,0 +1,13 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.backwards; + +import org.apache.http.HttpHost; + +record MixedClusterTestNode(String id, String nodeName, String version, HttpHost publishAddress) {} diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNodes.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNodes.java new file mode 100644 index 0000000000000..d4bfea19fd1ff --- /dev/null +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/MixedClusterTestNodes.java @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + + package org.elasticsearch.backwards; + + import org.apache.http.HttpHost; + import org.elasticsearch.client.Request; + import org.elasticsearch.client.Response; + import org.elasticsearch.client.RestClient; + import org.elasticsearch.test.rest.ObjectPath; + + import java.io.IOException; + import java.util.Collections; + import java.util.HashMap; + import java.util.List; + import java.util.Map; + import java.util.stream.Collectors; + + final class MixedClusterTestNodes { + private final Map<String, MixedClusterTestNode> nodesById; + private final String bwcNodesVersion; + + private MixedClusterTestNodes(String bwcNodesVersion, Map<String, MixedClusterTestNode> nodesById) { + this.bwcNodesVersion = bwcNodesVersion; + this.nodesById = nodesById; + } + + public List<MixedClusterTestNode> getNewNodes() { + return nodesById.values().stream().filter(n -> n.version().equals(bwcNodesVersion) == false).collect(Collectors.toList()); + } + + public List<MixedClusterTestNode> getBWCNodes() { + return nodesById.values().stream().filter(n -> n.version().equals(bwcNodesVersion)).collect(Collectors.toList()); + } + + public MixedClusterTestNode getSafe(String id) { + MixedClusterTestNode node = nodesById.get(id); + if (node == null) { + throw new IllegalArgumentException("node with id [" + id + "] not found"); + } + return node; + } + + static MixedClusterTestNodes buildNodes(RestClient client, String bwcNodesVersion) throws IOException { + Response response = client.performRequest(new Request("GET", "_nodes")); + ObjectPath objectPath = ObjectPath.createFromResponse(response); + Map<String, Object> nodesAsMap = objectPath.evaluate("nodes"); + + Map<String, MixedClusterTestNode> nodesById = new HashMap<>(); + for (var id : nodesAsMap.keySet()) { + nodesById.put( + id, + new MixedClusterTestNode( + id, + objectPath.evaluate("nodes." + id + ".name"), + objectPath.evaluate("nodes." + id + ".version"), + HttpHost.create(objectPath.evaluate("nodes."
+ id + ".http.publish_address")) + ) + ); + } + return new MixedClusterTestNodes(bwcNodesVersion, Collections.unmodifiableMap(nodesById)); + } + + public int size() { + return nodesById.size(); + } +} diff --git a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java index a0f0f1319b40d..461b731e518fb 100644 --- a/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java +++ b/qa/mixed-cluster/src/test/java/org/elasticsearch/backwards/SearchWithMinCompatibleSearchNodeIT.java @@ -8,16 +8,12 @@ package org.elasticsearch.backwards; import org.apache.http.HttpHost; -import org.elasticsearch.Version; -import org.elasticsearch.backwards.IndexingIT.Node; -import org.elasticsearch.backwards.IndexingIT.Nodes; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.Strings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.rest.ESRestTestCase; @@ -34,25 +30,24 @@ public class SearchWithMinCompatibleSearchNodeIT extends ESRestTestCase { + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); + private static final String NEW_NODES_VERSION = System.getProperty("tests.new_nodes_version"); + private static String index = "test_min_version"; private static int numShards; private static int numReplicas = 1; private static int numDocs; - private static Nodes nodes; - private static List allNodes; - private static Version bwcVersion; - private static Version newVersion; + private static MixedClusterTestNodes nodes; + private static List allNodes; @Before public void prepareTestData() throws IOException { - nodes = IndexingIT.buildNodeAndVersions(client()); + nodes = MixedClusterTestNodes.buildNodes(client(), BWC_NODES_VERSION); numShards = nodes.size(); numDocs = randomIntBetween(numShards, 16); allNodes = new ArrayList<>(); allNodes.addAll(nodes.getBWCNodes()); allNodes.addAll(nodes.getNewNodes()); - bwcVersion = nodes.getBWCNodes().get(0).version(); - newVersion = nodes.getNewNodes().get(0).version(); if (client().performRequest(new Request("HEAD", "/" + index)).getStatusLine().getStatusCode() == 404) { createIndex( @@ -72,10 +67,15 @@ public void prepareTestData() throws IOException { } public void testMinVersionAsNewVersion() throws Exception { - try (RestClient client = buildClient(restClientSettings(), allNodes.stream().map(Node::publishAddress).toArray(HttpHost[]::new))) { + try ( + RestClient client = buildClient( + restClientSettings(), + allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) + ) + ) { Request newVersionRequest = new Request( "POST", - index + "/_search?min_compatible_shard_node=" + newVersion + "&ccs_minimize_roundtrips=false" + index + "/_search?min_compatible_shard_node=" + NEW_NODES_VERSION + "&ccs_minimize_roundtrips=false" ); assertBusy(() -> { ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(newVersionRequest)); @@ -87,73 +87,65 @@ public void testMinVersionAsNewVersion() throws Exception { 
{"error":{"root_cause":[],"type":"search_phase_execution_exception\"""")); assertThat(responseException.getMessage(), containsString(Strings.format(""" caused_by":{"type":"version_mismatch_exception",\ - "reason":"One of the shards is incompatible with the required minimum version [%s]\"""", newVersion))); + "reason":"One of the shards is incompatible with the required minimum version [%s]\"""", NEW_NODES_VERSION))); }); } } public void testMinVersionAsOldVersion() throws Exception { - try (RestClient client = buildClient(restClientSettings(), allNodes.stream().map(Node::publishAddress).toArray(HttpHost[]::new))) { + try ( + RestClient client = buildClient( + restClientSettings(), + allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) + ) + ) { Request oldVersionRequest = new Request( "POST", - index + "/_search?min_compatible_shard_node=" + bwcVersion + "&ccs_minimize_roundtrips=false" + index + "/_search?min_compatible_shard_node=" + BWC_NODES_VERSION + "&ccs_minimize_roundtrips=false" ); oldVersionRequest.setJsonEntity(""" {"query":{"match_all":{}},"_source":false}"""); assertBusy(() -> { - assertWithBwcVersionCheck(() -> { - Response response = client.performRequest(oldVersionRequest); - ObjectPath responseObject = ObjectPath.createFromResponse(response); - Map shardsResult = responseObject.evaluate("_shards"); - assertThat(shardsResult.get("total"), equalTo(numShards)); - assertThat(shardsResult.get("successful"), equalTo(numShards)); - assertThat(shardsResult.get("failed"), equalTo(0)); - Map hitsResult = responseObject.evaluate("hits.total"); - assertThat(hitsResult.get("value"), equalTo(numDocs)); - assertThat(hitsResult.get("relation"), equalTo("eq")); - }, client, oldVersionRequest); + Response response = client.performRequest(oldVersionRequest); + ObjectPath responseObject = ObjectPath.createFromResponse(response); + Map shardsResult = responseObject.evaluate("_shards"); + assertThat(shardsResult.get("total"), equalTo(numShards)); + assertThat(shardsResult.get("successful"), equalTo(numShards)); + assertThat(shardsResult.get("failed"), equalTo(0)); + Map hitsResult = responseObject.evaluate("hits.total"); + assertThat(hitsResult.get("value"), equalTo(numDocs)); + assertThat(hitsResult.get("relation"), equalTo("eq")); }); } } public void testCcsMinimizeRoundtripsIsFalse() throws Exception { - try (RestClient client = buildClient(restClientSettings(), allNodes.stream().map(Node::publishAddress).toArray(HttpHost[]::new))) { - Version version = randomBoolean() ? newVersion : bwcVersion; + try ( + RestClient client = buildClient( + restClientSettings(), + allNodes.stream().map(MixedClusterTestNode::publishAddress).toArray(HttpHost[]::new) + ) + ) { + String version = randomBoolean() ? 
NEW_NODES_VERSION : BWC_NODES_VERSION; Request request = new Request( "POST", index + "/_search?min_compatible_shard_node=" + version + "&ccs_minimize_roundtrips=true" ); assertBusy(() -> { - assertWithBwcVersionCheck(() -> { - ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request)); - assertThat( - responseException.getResponse().getStatusLine().getStatusCode(), - equalTo(RestStatus.BAD_REQUEST.getStatus()) - ); - assertThat(responseException.getMessage(), containsString(""" - {"error":{"root_cause":[{"type":"action_request_validation_exception"\ - """)); - assertThat( - responseException.getMessage(), - containsString( - "\"reason\":\"Validation Failed: 1: " - + "[ccs_minimize_roundtrips] cannot be [true] when setting a minimum compatible shard version;\"" - ) - ); - }, client, request); + ResponseException responseException = expectThrows(ResponseException.class, () -> client.performRequest(request)); + assertThat(responseException.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); + assertThat(responseException.getMessage(), containsString(""" + {"error":{"root_cause":[{"type":"action_request_validation_exception"\ + """)); + assertThat( + responseException.getMessage(), + containsString( + "\"reason\":\"Validation Failed: 1: " + + "[ccs_minimize_roundtrips] cannot be [true] when setting a minimum compatible shard version;\"" + ) + ); }); } } - - private void assertWithBwcVersionCheck(CheckedRunnable code, RestClient client, Request request) throws Exception { - if (bwcVersion.before(Version.V_7_12_0)) { - // min_compatible_shard_node support doesn't exist in older versions and there will be an "unrecognized parameter" exception - ResponseException exception = expectThrows(ResponseException.class, () -> client.performRequest(request)); - assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); - assertThat(exception.getMessage(), containsString("contains unrecognized parameter: [min_compatible_shard_node]")); - } else { - code.run(); - } - } } diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java index 8ca4ce7a4eb2f..5255cbf401c9a 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/CCSDuelIT.java @@ -19,7 +19,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; import org.elasticsearch.action.bulk.BulkProcessor2; import org.elasticsearch.action.bulk.BulkRequest; @@ -187,8 +186,7 @@ private static void indexDocuments(String idPrefix) throws IOException, Interrup IndexResponse indexResponse = restHighLevelClient.index(indexRequest, RequestOptions.DEFAULT); assertEquals(201, indexResponse.status().getStatus()); - CreateIndexResponse response = createIndex(INDEX_NAME + "_empty"); - assertTrue(response.isAcknowledged()); + ElasticsearchAssertions.assertAcked(createIndex(INDEX_NAME + "_empty")); int numShards = randomIntBetween(1, 5); Settings settings = indexSettings(numShards, 0).build(); @@ -209,8 +207,7 @@ private static void indexDocuments(String idPrefix) throws IOException, 
Interrup } } }"""; - response = createIndex(INDEX_NAME, settings, mapping); - assertTrue(response.isAcknowledged()); + ElasticsearchAssertions.assertAcked(createIndex(INDEX_NAME, settings, mapping)); BulkProcessor2 bulkProcessor = BulkProcessor2.builder( (r, l) -> restHighLevelClient.bulkAsync(r, RequestOptions.DEFAULT, l), diff --git a/qa/os/centos-7/build.gradle b/qa/os/centos-7/build.gradle deleted file mode 100644 index 814b04d4aec5f..0000000000000 --- a/qa/os/centos-7/build.gradle +++ /dev/null @@ -1 +0,0 @@ -project.ext.shouldTestDocker = true diff --git a/qa/os/debian-9/build.gradle b/qa/os/debian-9/build.gradle deleted file mode 100644 index 814b04d4aec5f..0000000000000 --- a/qa/os/debian-9/build.gradle +++ /dev/null @@ -1 +0,0 @@ -project.ext.shouldTestDocker = true diff --git a/qa/os/fedora-28/build.gradle b/qa/os/fedora-28/build.gradle deleted file mode 100644 index 814b04d4aec5f..0000000000000 --- a/qa/os/fedora-28/build.gradle +++ /dev/null @@ -1 +0,0 @@ -project.ext.shouldTestDocker = true diff --git a/qa/os/fedora-29/build.gradle b/qa/os/fedora-29/build.gradle deleted file mode 100644 index 814b04d4aec5f..0000000000000 --- a/qa/os/fedora-29/build.gradle +++ /dev/null @@ -1 +0,0 @@ -project.ext.shouldTestDocker = true diff --git a/qa/os/oel-7/build.gradle b/qa/os/oel-7/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/qa/os/sles-12/build.gradle b/qa/os/sles-12/build.gradle deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/qa/os/ubuntu-1804/build.gradle b/qa/os/ubuntu-1804/build.gradle deleted file mode 100644 index 814b04d4aec5f..0000000000000 --- a/qa/os/ubuntu-1804/build.gradle +++ /dev/null @@ -1 +0,0 @@ -project.ext.shouldTestDocker = true diff --git a/qa/os/windows-2012r2/build.gradle b/qa/os/windows-2012r2/build.gradle deleted file mode 100644 index 63b149712bcfd..0000000000000 --- a/qa/os/windows-2012r2/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -import org.elasticsearch.gradle.internal.test.GradleDistroTestTask - -String boxId = project.properties.get('vagrant.windows-2012r2.id') -if (boxId != null) { - vagrant { - hostEnv 'VAGRANT_WINDOWS_2012R2_BOX', boxId - } -} else { - // box id was not supplied, so disable the distro tests - tasks.withType(GradleDistroTestTask).configureEach { - onlyIf("Project property vagrant.windows-2012r2.id set") { false } - } -} diff --git a/qa/os/windows-2016/build.gradle b/qa/os/windows-2016/build.gradle deleted file mode 100644 index c54cc97c68bfe..0000000000000 --- a/qa/os/windows-2016/build.gradle +++ /dev/null @@ -1,13 +0,0 @@ -import org.elasticsearch.gradle.internal.test.GradleDistroTestTask - -String boxId = project.properties.get('vagrant.windows-2016.id') -if (boxId != null) { - vagrant { - hostEnv 'VAGRANT_WINDOWS_2016_BOX', boxId - } -} else { - // box id was not supplied, so disable the distro tests - tasks.withType(GradleDistroTestTask).configureEach { - enabled = false - } -} diff --git a/qa/os/README.md b/qa/packaging/README.md similarity index 100% rename from qa/os/README.md rename to qa/packaging/README.md diff --git a/qa/os/build.gradle b/qa/packaging/build.gradle similarity index 90% rename from qa/os/build.gradle rename to qa/packaging/build.gradle index b12c53e63e10c..758dfe6661766 100644 --- a/qa/os/build.gradle +++ b/qa/packaging/build.gradle @@ -36,13 +36,3 @@ tasks.named("test").configure { enabled = false } tasks.register('destructivePackagingTest') { dependsOn 'destructiveDistroTest' } - -subprojects { Project platformProject -> - 
tasks.register('packagingTest') { - dependsOn 'distroTest' - } - - vagrant { - hostEnv 'VAGRANT_PROJECT_DIR', platformProject.projectDir.absolutePath - } -} diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveGenerateInitialCredentialsTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/ArchiveTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/CertGenCliTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/ConfigurationTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/CronEvalCliTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/CronEvalCliTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/CronEvalCliTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/CronEvalCliTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebPreservationTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/DockerTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/EnrollNodeToClusterTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/EnrollNodeToClusterTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/EnrollNodeToClusterTests.java rename to 
qa/packaging/src/test/java/org/elasticsearch/packaging/test/EnrollNodeToClusterTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/EnrollmentProcessTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/EnrollmentProcessTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/EnrollmentProcessTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/EnrollmentProcessTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/HttpClientThreadsFilter.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/HttpClientThreadsFilter.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/HttpClientThreadsFilter.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/HttpClientThreadsFilter.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackageTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PackageTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackageTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackageUpgradeTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagesSecurityAutoConfigurationTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/PasswordToolsTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/PluginCliTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PluginCliTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/PluginCliTests.java rename to 
qa/packaging/src/test/java/org/elasticsearch/packaging/test/PluginCliTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/RpmMetadataTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/RpmMetadataTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/RpmMetadataTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/RpmPreservationTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/SqlCliTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/SqlCliTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/SqlCliTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/SqlCliTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/TemporaryDirectoryConfigTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/TemporaryDirectoryConfigTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/TemporaryDirectoryConfigTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/TemporaryDirectoryConfigTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/test/WindowsServiceTests.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Archives.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Archives.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Cleanup.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Cleanup.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Cleanup.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Distribution.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileExistenceMatchers.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileExistenceMatchers.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/FileExistenceMatchers.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileExistenceMatchers.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java 
b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileMatcher.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileUtils.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/FileUtils.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/FileUtils.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Installation.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Installation.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Installation.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Packages.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Packages.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Platforms.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Platforms.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Platforms.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Platforms.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/ProcessInfo.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ProcessInfo.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/ProcessInfo.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/ProcessInfo.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/ServerUtils.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Shell.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/Shell.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/Shell.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/Docker.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileAttributes.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileAttributes.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileAttributes.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileAttributes.java diff --git 
a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileMatcher.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileMatcher.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileMatcher.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerFileMatcher.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerShell.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerShell.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/DockerShell.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerShell.java diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/util/docker/MockServer.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/MockServer.java similarity index 100% rename from qa/os/src/test/java/org/elasticsearch/packaging/util/docker/MockServer.java rename to qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/MockServer.java diff --git a/qa/os/src/test/resources/log4j2-test.properties b/qa/packaging/src/test/resources/log4j2-test.properties similarity index 100% rename from qa/os/src/test/resources/log4j2-test.properties rename to qa/packaging/src/test/resources/log4j2-test.properties diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/http.crt b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http.crt similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/http.crt rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http.crt diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/http.key b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http.key similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/http.key rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http.key diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/http_ca.crt b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http_ca.crt similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/http_ca.crt rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http_ca.crt diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/http_ca.key b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http_ca.key similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/http_ca.key rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/http_ca.key diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/transport.crt b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport.crt similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/transport.crt rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport.crt diff --git 
a/qa/os/src/test/resources/org/elasticsearch/packaging/test/transport.key b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport.key similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/transport.key rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport.key diff --git a/qa/os/src/test/resources/org/elasticsearch/packaging/test/transport_ca.crt b/qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport_ca.crt similarity index 100% rename from qa/os/src/test/resources/org/elasticsearch/packaging/test/transport_ca.crt rename to qa/packaging/src/test/resources/org/elasticsearch/packaging/test/transport_ca.crt diff --git a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java index a7dad1ebeec50..c020cc118ca78 100644 --- a/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java +++ b/qa/repository-multi-version/src/test/java/org/elasticsearch/upgrades/MultiVersionRepositoryAccessIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.snapshots.SnapshotsService; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; @@ -183,7 +184,7 @@ public void testUpgradeMovesRepoToNewMetaVersion() throws IOException { // to check behavior on other operations below. final boolean verify = TEST_STEP != TestStep.STEP3_OLD_CLUSTER || SnapshotsService.includesUUIDs(minNodeVersion) - || minNodeVersion.before(IndexVersion.V_7_12_0); + || minNodeVersion.before(IndexVersions.V_7_12_0); if (verify == false) { expectThrowsAnyOf(EXPECTED_BWC_EXCEPTIONS, () -> createRepository(repoName, false, true)); } diff --git a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index f08427f8949a5..eb05d331af033 100644 --- a/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade-legacy/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.rest.ObjectPath; import org.hamcrest.Matchers; @@ -430,7 +431,7 @@ public void testRecoveryClosedIndex() throws Exception { } final IndexVersion indexVersionCreated = indexVersionCreated(indexName); - if (indexVersionCreated.onOrAfter(IndexVersion.V_7_2_0)) { + if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0)) { // index was created on a version that supports the replication of closed indices, // so we expect the index to be closed and replicated ensureGreen(indexName); @@ -500,7 +501,7 @@ public void testClosedIndexNoopRecovery() throws Exception { closeIndex(indexName); } - if (indexVersionCreated(indexName).onOrAfter(IndexVersion.V_7_2_0)) { + if (indexVersionCreated(indexName).onOrAfter(IndexVersions.V_7_2_0)) { // index was created on a version that supports the replication of closed indices, so we expect it to be closed and replicated 
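// IndexVersion is the comparable per-index version type returned by indexVersionCreated(),
// while the named historical constants such as V_7_2_0 now live on the IndexVersions holder
// class, hence the IndexVersions.V_7_2_0 comparison above.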
assertTrue(minimumNodeVersion().onOrAfter(Version.V_7_2_0)); ensureGreen(indexName); diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java new file mode 100644 index 0000000000000..2d8ff8b747323 --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ClusterFeatureMigrationIT.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.upgrades; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.client.Request; +import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.features.FeatureService; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasSize; + +public class ClusterFeatureMigrationIT extends ParameterizedRollingUpgradeTestCase { + + @BeforeClass + public static void checkMigrationVersion() { + assumeTrue( + "This checks migrations from before cluster features were introduced", + getOldClusterVersion().before(FeatureService.CLUSTER_FEATURES_ADDED_VERSION) + ); + } + + public ClusterFeatureMigrationIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testClusterFeatureMigration() throws IOException { + if (isUpgradedCluster()) { + // check the nodes all have a feature in their cluster state (there should always be features_supported) + var response = entityAsMap(adminClient().performRequest(new Request("GET", "/_cluster/state/nodes"))); + List<?> nodeFeatures = (List<?>) XContentMapValues.extractValue("nodes_features", response); + assertThat(nodeFeatures, hasSize(adminClient().getNodes().size())); + + Map<String, List<?>> features = nodeFeatures.stream() + .map(o -> (Map<?, ?>) o) + .collect(Collectors.toMap(m -> (String) m.get("node_id"), m -> (List<?>) m.get("features"))); + + Set<String> missing = features.entrySet() + .stream() + .filter(e -> e.getValue().contains(FeatureService.FEATURES_SUPPORTED.id()) == false) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + assertThat(missing + " out of " + features.keySet() + " does not have the required feature", missing, empty()); + } + } +} diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java index e269b608930a8..f3971d832be3e 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/FieldCapsIT.java @@ -11,7 +11,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import org.apache.http.HttpHost; -import org.elasticsearch.Version; +import org.elasticsearch.Build; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; @@ -272,18 +272,19 @@ public void
testAllIndicesWithIndexFilter() throws Exception { @SuppressWarnings("unchecked") // Returns a client connected to one of the upgraded nodes. private RestClient getUpgradedNodeClient() throws IOException { + var currentVersion = Build.current().version(); for (HttpHost host : getClusterHosts()) { RestClient client = RestClient.builder(host).build(); Request nodesRequest = new Request("GET", "_nodes/_local/_none"); Map<String, Object> nodeMap = (Map<String, Object>) entityAsMap(client.performRequest(nodesRequest)).get("nodes"); Map<String, Object> nameMap = (Map<String, Object>) nodeMap.values().iterator().next(); String version = (String) nameMap.get("version"); - if (version.equals(Version.CURRENT.toString())) { + if (version.equals(currentVersion)) { return client; } client.close(); } - throw new IllegalStateException("Couldn't find node on version " + Version.CURRENT); + throw new IllegalStateException("Couldn't find node on version " + currentVersion); } // Test field type filtering on mixed cluster diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java index 73ab8fb0c25d2..d5b5e24e2ccde 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/IndexingIT.java @@ -29,6 +29,7 @@ import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; import static org.elasticsearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.elasticsearch.test.ListMatcher.matchesList; @@ -139,8 +140,7 @@ public void testAutoIdWithOpTypeCreate() throws IOException { Request waitForGreen = new Request("GET", "/_cluster/health"); waitForGreen.addParameter("wait_for_nodes", "3"); client().performRequest(waitForGreen); - Version minNodeVersion = minNodeVersion(); - if (minNodeVersion.before(Version.V_7_5_0)) { + if (clusterSupportsBulkApi() == false) { ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(bulk)); assertEquals(400, e.getResponse().getStatusLine().getStatusCode()); assertThat( @@ -410,19 +410,13 @@ private void assertCount(String index, int count) throws IOException { ); } - private Version minNodeVersion() throws IOException { + // TODO[lor]: replace this check with a (historical) feature check ("supports bulk requests") + private boolean clusterSupportsBulkApi() throws IOException { Map<String, Object> response = entityAsMap(client().performRequest(new Request("GET", "_nodes"))); Map<String, Object> nodes = (Map<String, Object>) response.get("nodes"); - Version minNodeVersion = null; - for (Map.Entry<String, Object> node : nodes.entrySet()) { - Map<String, Object> nodeInfo = (Map<String, Object>) node.getValue(); - Version nodeVersion = Version.fromString(nodeInfo.get("version").toString()); - if (minNodeVersion == null) { - minNodeVersion = nodeVersion; - } else if (nodeVersion.before(minNodeVersion)) { - minNodeVersion = nodeVersion; - } - } - return minNodeVersion; + + Predicate<Map<String, Object>> nodeSupportsBulkApi = n -> Version.fromString(n.get("version").toString()).onOrAfter(Version.V_7_5_0); + + return nodes.values().stream().map(o -> (Map<String, Object>) o).allMatch(nodeSupportsBulkApi); } } diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java index 26b1f8a0153b6..5a2c4c783ec85 100644 ---
a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/ParameterizedRollingUpgradeTestCase.java @@ -39,7 +39,7 @@ import static org.hamcrest.Matchers.notNullValue; public abstract class ParameterizedRollingUpgradeTestCase extends ESRestTestCase { - private static final Version OLD_CLUSTER_VERSION = Version.fromString(System.getProperty("tests.old_cluster_version")); + private static final String OLD_CLUSTER_VERSION = System.getProperty("tests.old_cluster_version"); private static final TemporaryFolder repoDirectory = new TemporaryFolder(); @@ -142,7 +142,7 @@ public static void resetNodes() { } protected static org.elasticsearch.Version getOldClusterVersion() { - return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION.toString()); + return org.elasticsearch.Version.fromString(OLD_CLUSTER_VERSION); } protected static IndexVersion getOldClusterIndexVersion() { @@ -151,7 +151,11 @@ protected static IndexVersion getOldClusterIndexVersion() { } protected static Version getOldClusterTestVersion() { - return Version.fromString(OLD_CLUSTER_VERSION.toString()); + return Version.fromString(OLD_CLUSTER_VERSION); + } + + protected static boolean isOldClusterVersion(String nodeVersion) { + return OLD_CLUSTER_VERSION.equals(nodeVersion); } protected static boolean isOldCluster() { diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java index 95f4c55314199..4b765849e6ea9 100644 --- a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/SnapshotBasedRecoveryIT.java @@ -100,7 +100,7 @@ public void testSnapshotBasedRecovery() throws Exception { } String primaryNodeId = getPrimaryNodeIdOfShard(indexName, 0); - Version primaryNodeVersion = getNodeVersion(primaryNodeId); + String primaryNodeVersion = getNodeVersion(primaryNodeId); // Sometimes the primary shard ends on the upgraded node (i.e. after a rebalance) // This causes issues when removing and adding replicas, since then we cannot allocate to any of the old nodes. @@ -108,13 +108,13 @@ public void testSnapshotBasedRecovery() throws Exception { // In that case we exclude the upgraded node from the shard allocation and cancel the shard to force moving // the primary to a node in the old version, this allows adding replicas in the first mixed round. 
logger.info("--> Primary node in first mixed round {} / {}", primaryNodeId, primaryNodeVersion); - if (primaryNodeVersion.after(getOldClusterVersion())) { + if (isOldClusterVersion(primaryNodeVersion) == false) { logger.info("--> cancelling primary shard on node [{}]", primaryNodeId); cancelShard(indexName, 0, primaryNodeId); logger.info("--> done cancelling primary shard on node [{}]", primaryNodeId); String currentPrimaryNodeId = getPrimaryNodeIdOfShard(indexName, 0); - assertThat(getNodeVersion(currentPrimaryNodeId), is(equalTo(getOldClusterVersion()))); + assertTrue(isOldClusterVersion(getNodeVersion(currentPrimaryNodeId))); } } else { logger.info("--> not in first upgrade round, removing exclusions for [{}]", indexName); @@ -148,19 +148,18 @@ private List getUpgradedNodeIds() throws IOException { Map> nodes = extractValue(responseMap, "nodes"); List upgradedNodes = new ArrayList<>(); for (Map.Entry> nodeInfoEntry : nodes.entrySet()) { - Version nodeVersion = Version.fromString(extractValue(nodeInfoEntry.getValue(), "version")); - if (nodeVersion.after(getOldClusterVersion())) { + String nodeVersion = extractValue(nodeInfoEntry.getValue(), "version"); + if (isOldClusterVersion(nodeVersion) == false) { upgradedNodes.add(nodeInfoEntry.getKey()); } } return upgradedNodes; } - private Version getNodeVersion(String primaryNodeId) throws IOException { + private String getNodeVersion(String primaryNodeId) throws IOException { Request request = new Request(HttpGet.METHOD_NAME, "_nodes/" + primaryNodeId); Response response = client().performRequest(request); - String nodeVersion = extractValue(responseAsMap(response), "nodes." + primaryNodeId + ".version"); - return Version.fromString(nodeVersion); + return extractValue(responseAsMap(response), "nodes." + primaryNodeId + ".version"); } private String getPrimaryNodeIdOfShard(String indexName, int shard) throws Exception { diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index 7e352c0f8adb2..f5beef38319e5 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -18,6 +18,7 @@ dependencies { clusterModules project(":modules:ingest-common") clusterModules project(":modules:reindex") clusterModules project(":modules:analysis-common") + clusterModules project(":modules:health-shards-availability") } tasks.named("yamlRestTest").configure { diff --git a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index ff3af31e4afd6..9afb533b037b4 100644 --- a/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/yamlRestTest/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -29,6 +29,7 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe .module("ingest-common") .module("reindex") .module("analysis-common") + .module("health-shards-availability") // The first node does not have the ingest role so we're sure ingest requests are forwarded: .node(0, n -> n.setting("node.roles", "[master,data,ml,remote_cluster_client,transform]")) .feature(FeatureFlag.TIME_SERIES_MODE) diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index d9c0ab5294906..e484b98d3188e 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle 
@@ -37,6 +37,7 @@ dependencies { clusterModules project(":modules:rest-root") clusterModules project(":modules:reindex") clusterModules project(':modules:analysis-common') + clusterModules project(':modules:health-shards-availability') } tasks.named("yamlRestTestV7CompatTransform").configure { task -> @@ -218,6 +219,12 @@ tasks.named("yamlRestTestV7CompatTransform").configure { task -> // we can now search using doc values only task.replaceValueInMatch("fields.object\\.nested1.long.searchable", true) + + //client.type no longer exists #101214 + task.replaceKeyInMatch("nodes.\$node_id.settings.client.type", "nodes.\$node_id.settings.node.attr.testattr") + task.replaceValueInMatch("nodes.\$node_id.settings.node.attr.testattr", "test") + task.replaceKeyInMatch("nodes.\$node_id.settings.client\\.type", "nodes.\$node_id.settings.node\\.attr\\.testattr") + task.replaceValueInMatch("nodes.\$node_id.settings.node\\.attr\\.testattr", "test") } tasks.register('enforceYamlTestConvention').configure { diff --git a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java index 36110174b9e83..465f17eca5532 100644 --- a/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java +++ b/rest-api-spec/src/yamlRestTest/java/org/elasticsearch/test/rest/ClientYamlTestSuiteIT.java @@ -31,6 +31,7 @@ public class ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { .module("rest-root") .module("reindex") .module("analysis-common") + .module("health-shards-availability") .feature(FeatureFlag.TIME_SERIES_MODE) .build(); diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/70_tsdb.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/70_tsdb.yml index 4730415a3162c..130f3690bb298 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/70_tsdb.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/70_tsdb.yml @@ -1,3 +1,9 @@ +--- +setup: + - skip: + version: "8.7.00 - 8.9.99" + reason: "Synthetic source shows up in the mapping in 8.10 and on, may trigger assert failures in mixed cluster tests" + --- "basic tsdb delete": - skip: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.info/30_settings.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.info/30_settings.yml index 99b8b6f361a47..dc6062c6f282b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.info/30_settings.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/nodes.info/30_settings.yml @@ -12,11 +12,11 @@ nodes.info: metric: [ settings ] - - match : { nodes.$node_id.settings.client.type: node } + - match : { nodes.$node_id.settings.node.attr.testattr: test } - do: nodes.info: metric: [ settings ] flat_settings: true - - match : { nodes.$node_id.settings.client\.type: node } + - match : { nodes.$node_id.settings.node\.attr\.testattr: test } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml index af09c56f0cfca..151698482368a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.vectors/60_dense_vector_dynamic_mapping.yml @@ -3,34 +3,6 @@ setup: version: ' - 8.10.99' reason: 'Dynamic mapping of floats to dense_vector was added in 8.11' - # Additional logging for issue: https://github.com/elastic/elasticsearch/issues/100502 - - do: - cluster.put_settings: - body: > - { - "persistent": { - "logger.org.elasticsearch.index": "TRACE", - "logger.org.elasticsearch.action.admin.indices.mapping.get": "TRACE", - "logger.org.elasticsearch.cluster.service.ClusterApplierService": "TRACE" - } - } - ---- -teardown: - - skip: - version: ' - 8.10.99' - reason: 'Dynamic mapping of floats to dense_vector was added in 8.11' - - - do: - cluster.put_settings: - body: > - { - "persistent": { - "logger.org.elasticsearch.index": null, - "logger.org.elasticsearch.action.admin.indices.mapping.get": null, - "logger.org.elasticsearch.cluster.service.ClusterApplierService": null - } - } --- "Fields with float arrays below the threshold still map as float": @@ -41,7 +13,9 @@ teardown: refresh: true body: my_field: [ 230.0, 300.33, -34.8988, 15.555, -200.0 ] - + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: test-too-short-still-float @@ -60,6 +34,9 @@ teardown: my_field: [ -457.1953,259.6788,271.9127,-26.8833,403.0915,-56.9197,-445.8869,-108.8167,417.8988,13.4232,-281.765,-405.8573,262.7831,-279.493,328.5591,-453.3941,-116.0368,435.4734,-439.0927,-332.9565,355.4955,324.9878,33.3519,-165.0182,188.1811,467.3455,185.1057,-233.8598,-17.6827,283.4271,-329.1247,-402.9721,404.7866,-358.7031,-267.4074,441.8363,320.2389,-128.0179,339.544,196.2018,-60.2688,336.0228,-440.1943,318.6882,-158.2596,277.0925,-487.4971,-338.9865,-275.716,136.8547,-253.6206,-40.2807,-357.0971,188.0344,-203.0674,449.9618,-223.2508,468.1441,302.4002,-65.0044,342.4431,205.6774,-118.636,-29.9706,183.9825,223.956,314.0691,137.0129,-8.0452,-15.131,-269.8643,-12.691,228.9777,-147.8384,-347.1117,-283.1905,459.2004,296.1321,-483.1799,414.3423,383.0187,-408.5525,-286.8169,482.5853,9.5232,-459.4968,-333.2521,109.0969,129.5107,43.4369,455.8283,-4.0423,-318.5019,339.1641,416.3581,-309.0429,84.2325,-355.8753,264.7671,43.8922,-298.6039,412.4413,19.4198,-251.279,-191.157,-478.2058,251.5709,-178.9633,479.293,188.399,380.9755,268.6575,120.3467,-322.0305,-255.4894,-377.515,56.9153,-133.9486,156.2546,-428.9581,-54.994,28.2146,158.7121,-426.7307,491.0086,-150.7205,-233.1005,244.5174,45.911,-406.1181,233.1636,175.9334,414.2805,421.7396,-322.8029,-252.2412,35.7622,318.5223,-141.5121,-375.4407,380.3081,222.1228,443.7844,367.377,-202.9594,-493.6231,-184.2242,-253.9838,463.1952,-416.3887,252.0867,-63.5317,411.0727,98.6261,330.7369,363.5685,-498.1848,-413.7246,-2.5996,-238.3547,-355.6041,-303.698,43.6266,383.1105,-72.3066,274.7491,321.9322,220.9543,-30.5578,400.0891,-181.7069,-386.4403,497.2206,-408.9611,138.485,-133.5666,-340.2569,-223.6313,270.884,-215.9399,74.3931,-244.1364,353.4219,-156.9905,488.3148,96.352,401.8525,-468.8344,129.9715,-27.1953,-168.631,187.7049,-336.5255,331.0652,204.3538,36.0182,366.8502,-468.6579,478.1409,-332.6136,-281.8499,63.7165,-458.8161,14.8894,-145.6397,267.1499,85.2025,326.3764,-419.6361,-133.9626,102.0618,443.3099,-207.9032,132.7032,234.001,-26.0754,105.6478,174.1252,-403.3511,-164.9714,-262.9344,-58.9668,357.6414,355.7508,-331.8443,153.5733,417.5712,260.7394,-150.1053,-435.6525,-364.1558,328.6183,-270.0863,107.1746,345.7998,480.8749,206.3896,-498.237,495.0835,481.9384,418.5571,-246.5213,-363.7304,311.7076,-53.1664,-297
.3839,122.3105,-13.9226,-145.9754,-189.1748,460.9375,194.5417,-28.1346,-261.2177,-88.8396,-254.6407,-465.3148,-169.5377,24.3113,-116.2323,-420.3526,317.2107,-231.6227,-270.8239,387.8598,412.4251,428.1373,308.2044,275.2082,402.3663,-209.9843,-492.7269,225.1948,326.469,207.3557,-131.7677,371.9408,-139.3098,324.205,-126.6204,-335.0853,-248.2587,-344.907,307.2109,-441.3296,-318.027,414.6535,172.0537,-280.4991,331.0475,-158.0178,-285.1951,12.3632,149.9347,282.8302,-91.5624,-180.6097,496.0881,368.2567,357.6875,-194.2106,48.9213,-479.2956,-165.139,238.7811,302.7007,297.2805,208.7099,-5.5755,-85.7911,-358.1111,344.6131,415.7199,-219.1525,490.5003,-46.0096,498.2818,-91.8067,384.0104,396.1107,408.2827,-5.3919,-333.7992,-168.985,273.72,359.7125,227.7621,158.3406,-366.9722,3.7709,27.2728,71.9754,269.5792,-365.281,117.9152,-184.3682,356.9013,-142.6579,-496.7598,122.0194,89.1247,4.1914,-81.9905,465.0841,115.4727,169.6116,-199.9951,-223.3149,-447.3022,11.831,320.2368,105.1316,344.2462,8.6333,62.2285,-70.3944,-284.6694,-482.4229,-448.1569,-237.7858,222.3921,-172.1386,-312.5756,-390.0565,398.951,119.9784,-419.6537,121.3186,481.3011,-181.6662,-56.0219,424.1359,7.1461,138.8567,-307.0606,334.066,254.0897,473.7227,45.5936,133.7268,49.5334,-283.3406,179.4466,105.6191,-30.4162,271.5774,6.1156,110.4732,286.4325,13.3431,494.0139,-371.7624,283.3652,272.0558,-302.343,122.7245,-463.9261,299.9807,282.4502,-262.4911,183.4289,222.7474,-229.5973,141.6188,262.5468,278.1155,-331.0891,-393.6027,-230.1461,201.6657,-93.3604,-395.8877,-125.2013,-222.973,368.3759,234.6628,-28.6809,-151.0703,432.0315,253.1214,430.7065,-143.6963,499.84,85.1683,280.4354,196.6013,139.0476,120.8148,-398.8155,-335.5504,229.0516,403.8604,-383.9868,-79.975,-152.77,220.4036,135.0355,238.2176,-242.3085,-177.0743,381.8202,411.167,378.0153,456.5976,364.013,24.2316,-395.4659,-210.2581,138.7539,479.7398,-291.7797,-123.0491,188.9817,42.8931,-354.4479,358.853,-43.6168,-190.6656,-103.3037,47.8915,-358.5402,374.9758,493.9951,-427.2376,-119.1142,-453.2975,-326.2696,-212.8273,-142.2931,-179.795,355.77,-156.2903,331.2006,451.9252,185.2944,-96.1941,173.0447,345.2744,43.0151,381.7845,-143.4125,84.654,-208.7053,-293.141,333.6349,-80.472,-376.9817,214.6298,-43.0931,-254.7834,-421.6961,-368.844,467.5544,-418.61,-66.6824,-350.2671,348.8241,252.3495,41.8677,-128.869,90.0391,-136.7405,-136.7822,489.8074,-396.8204,63.8355,323.9557,-83.6674,451.263,152.8955,-291.7497,410.0787,-299.7468,51.34,-298.6066,-58.853,325.911,-281.9541,-15.3457,299.1325,-347.4959,388.407,343.1096,28.1816,24.3013,-111.3312,190.5583,279.9848,-479.8894,123.2182,233.8425,-466.2128,-134.7122,217.8674,432.9523,-186.799,-477.2512,-223.5514,64.274,141.5251,-161.2187,150.2791,-228.1087,81.172,451.0879,-230.3818,-304.9398,402.1081,199.1266,275.3423,-123.9548,-21.1815,-384.544,446.9626,208.9692,-337.4827,-58.1011,344.2642,230.2868,44.9176,245.9885,-284.1875,-351.6104,108.1289,459.649,191.4334,53.591,136.7139,10.5912,-15.8411,62.8305,448.5256,194.7705,-356.3214,84.4996,-133.2502,-358.6308,262.7949,219.8741,-355.3985,468.2922,243.7227,-408.3166,188.6111,-221.7264,-286.8234,-340.3046,-224.5375,332.2615,73.2788,-24.7857,-485.2204,-136.7196,-162.9693,92.6017,-99.611,-186.5203,495.5483,240.8051,409.6493,-58.1321,-154.1239,-335.9719,-82.4408,-471.3057,-43.373,301.0884,-96.6359,-236.6906,435.7313,-227.7263,-406.8904,-392.3187,169.0043,-371.0852,-271.3652,-57.4466,-196.8455,52.741,361.7395,-117.8599,190.5339,276.6457,-321.9851,425.881,-473.2662,-74.2968,221.3612,-465.4429,181.723,-78.4508,21.6152,148.8107,-166.1687,-281.
6391,-462.3636,-420.5255,-161.4143,98.8383,-374.5345,-366.2851,187.1506,-405.1865,239.4847,-246.8352,33.1748,-344.1211,477.9759,-294.1354,-359.5015,-44.8454,151.7072,-22.7324,-260.3293,99.1414,-20.5536,173.3766,-422.6692,458.3853,-199.7898,-236.3929,365.2599,-66.4191,388.3472,283.0336,-268.9463,269.5704,360.9679,-322.102,-407.0705,-93.0994,338.9108,-189.1359,-216.9102,-249.0153,122.6058,-254.8318,-112.2771,-279.0506,-168.4431,392.888,394.7607,468.0544,340.1852,-293.1288,-8.2912,-419.2608,323.3382,-93.8793,-242.0672,427.7716,-441.6906,128.3229,424.4679,-71.8586,134.5411,-74.5205,18.4141,17.7277,126.9123,-137.6119,33.3783,222.9912,-279.3582,89.1226,-90.031,12.7221,98.7767,-80.2372,-485.9212,-481.6575,-325.9729,318.8005,-433.786,-296.6337,421.6515,-27.2786,-445.2456,451.8876,-482.1014,-143.1098,186.1258,-90.2432,-297.7479,-351.0026,-423.7518,-219.6096,-269.2043,33.5767,-325.4335,392.4866,-418.243,112.5852,-248.1306,451.2154,-419.2995,154.5752,483.6323,-315.962,-196.872,406.1769,-356.9868,67.5251,-255.6475,103.5181,-450.4418,386.9518,456.4057,99.4591,-166.636,275.5374,200.4925,99.7623,292.6794,-422.3998,419.4837,-466.548,-462.8519,-381.4489,472.8356,-129.9563,441.4941,-376.1232,-114.1945,233.5531,313.6963,394.9503,-278.7558,350.7515,47.9427,220.7074,-178.9789,-346.0485,-128.5665,8.9461,159.9838,-57.3637,351.9478,-65.9411,-258.1788,498.9494,-472.613,-428.5678,17.3981,-435.3682,-421.155,-54.9177,-490.2348,178.3777,-31.9618,-242.1805,362.3736,380.8179,446.4272,-23.9142,61.3588,-489.5704,363.6446,-186.1519,-351.8684,-322.2791,-226.0431,404.6996,203.9824,306.0958,234.0145,-180.4996,452.0633,257.171,-83.6197,-393.152,396.6934,32.156,-428.7645,183.7886,494.767,68.3905,278.9785,-40.4759,261.7298,236.5778,4.5577,-130.9582,433.2837,-298.1139,-107.9822,-196.8446,-121.1765,-292.5509,-246.4546,-258.6038,280.1334,-52.6511,483.2928,-185.7577,-75.3705,351.3411,179.1282,-479.3838,166.2733,-197.9043,282.6848,-50.4744,-492.7178,183.6435,-127.2379,483.646,433.0805,-228.5488,139.8314,-145.1337,-403.1749,306.2704,122.7149,479.6928,85.3866,108.095,-224.152,494.6848,-368.4504,-180.7579,61.7136,51.2045,-383.0103,-376.4816,-292.8217,-201.118,332.1516,425.2758,138.1284,-229.4302,432.9081,2.9898,-437.7631,-448.2151,129.9126,-170.2405,499.0396,-48.2137,363.8046,-423.2511,-28.0804,-267.826,-356.6288,-99.9371,-409.8465,170.4902,-269.2584,-277.4098,300.8819,-142.5889,339.0952,16.2275,-310.8646,201.0733,-495.5905,341.9279,-149.1184,-494.4928,-81.7343,209.9762,273.4892,380.3163,359.2424,-242.5,-42.1268,-303.9792,11.6018,361.5483,416.4178,10.3282,195.9796,148.8096,-60.9724,-205.5221,-145.4574,-341.5913,426.8996,-19.5843,60.6265,-133.4191,-139.8737,281.7465,461.2854,-270.8902,61.0182,-58.6791,-254.0193,-234.1206,-208.7334,39.7498,-14.337,-68.2319,-342.2756,403.6834,401.6122,-166.1637,47.3592,-325.7,274.5459,343.4873,328.3783,-370.1657,-122.8967,-231.3182,122.6609,119.2685,-223.5437,-210.8076,116.5022,340.2814,256.1852,-217.3487,-150.9598,331.1343,-453.8182,-448.0842,-95.2475,-340.9942,-416.7835,-96.7226,-328.7212,-373.4337,472.2214,-484.522,-465.1583,330.0712,73.2052,-55.1266,-352.8984,341.0742,-230.4845,321.0752,236.2116,35.1902,75.3489,-469.4042,110.2036,35.1156,454.7224,103.0685,-221.7499,-23.6898,-259.2362,-110.509,-261.0039,219.2391,-139.9404,155.7723,377.9713,434.0318,-365.1397,459.1471,-318.5774,323.4256,194.325,-311.9529,-153.9019,-346.5811,76.4069,443.2121,-199.407,495.6636,-138.5213,-145.3432,-151.7758,-365.3547,263.6507,-491.1686,-183.5585,-12.6044,318.5346,-443.8639,-179.0338,477.9093,-355.5118,-423.0035,-229.1166,-96
.7782,-479.2384,192.9085,223.3407,-302.9472,297.3847,477.584,-297.5958,168.6023,-80.6912,-89.8717,87.1476,-129.7807,346.5576,-253.9729,-399.6858,-389.5785,35.1648,-180.451,-49.6084,83.9582,-185.2329,97.283,195.5249,-91.6969,199.202,-449.792,333.4825,-113.7558,443.434,394.3587,-94.9074,71.2092,-251.1774,-85.047,-46.4004,20.2595,341.1073,-91.2527,86.3775,303.1247,-336.9011,343.9894,-384.1261,154.4411,-465.2493,-63.3249,488.0231,348.6725,458.2093,322.401,220.2532,283.3734,-386.4252,-256.5262,-87.2205,96.8199,47.6908,-399.6307,214.7716,-19.9177,-458.513,-194.3218,-320.5342,-275.857,-301.6955,-84.9038,358.3475,-88.9271,499.7721,-161.7403,355.4894,313.6211,-176.1703,61.8427,107.603,-176.063,-426.5408,292.3612,58.3331,-115.8853,471.4131,-76.4815,-309.6263,361.4518,192.4763,-145.7968,256.3888,133.335,-474.0901,-366.9793,-495.223,457.2366,170.056,285.0152,89.8213,225.2251,354.1822,-298.374,-332.9164,-55.2409,306.9283,25.9392,218.0624,7.5085,-151.8768,-155.4932,6.0001,201.4506,-259.9874,485.1078,-362.8516,-230.1434,-398.2512,243.0012,32.302,-197.91,144.1195,-89.4196,-44.0399,-371.7866,227.6007,492.7526,499.3824,162.2475,279.0325,177.0781,341.0137,199.6009,108.1678,312.2319,-211.5001,-92.675,357.0513,-337.924,-348.984,-350.3677,173.3473,-193.7346,-318.5609,-2.0928,46.6287,-346.8513,36.634,-277.4949,-149.325,481.1378,370.3864,-139.6689,-332.2805,48.0292,109.8363,494.6994,373.6992,495.7442,400.4998,-26.2276,-308.7669,188.9497,257.9182,-116.6944,269.8932,197.005,123.1139,-356.2058,485.1982,-4.0119,397.8434,-204.67,-494.5133,-414.1299,142.1512,-36.5446,390.0718,6.9876,263.1216,457.5598,89.6086,-266.3804,17.3457,88.8182,236.6271,81.175,-170.2249,-5.7664,422.7852,180.3349,-135.2642,149.2285,-70.6607,-46.169,-389.3313,230.6125,388.4853,-438.3426,111.8034,300.0416,37.5604,-437.3868,-114.1336,312.7777,-99.1161,-312.9015,-147.3787,-434.0536,19.5034,141.706,-281.4504,-208.9608,281.4619,-361.0596,-464.2757,77.8205,232.5575,165.4104,424.8738,124.5555,342.038,86.7543,278.0216,311.2686,337.834,-90.0545,-210.1143,-488.4095,-80.7535,92.3731,-122.622,-288.0571,1.7285,-5.2998,100.0717,-395.0571,-477.5587,-160.5642,-119.4214,-232.233,415.7276,-204.3216,-436.7766,-103.4644,-427.0939,-31.0927,-440.2919,120.5971,-223.3623,-199.0988,304.8697,432.5731,-231.5791,-397.696,306.4134,330.1018,32.4345,-175.719,464.6091,-291.5686,300.1631,-167.4592,238.9574,104.5893,-187.2215,-294.0111,-361.9094,480.6847,-304.2133,-448.7144,67.7235,-255.9669,254.7379,464.5465,6.8909,-368.7554,337.5993,39.1928,-376.0625,433.4224,-109.1488,341.7731,377.843,446.839,-192.283,251.1592,437.6812,-478.3409,345.7668,377.965,125.6188,-462.0904,-235.3324,316.8892,-460.7371,248.9306,418.7082,-333.7257,-104.5062,-408.1356,148.6624,-158.4929,-477.0664,80.4926,-214.6292,211.3377,322.7854,-312.851,403.0215,-213.3089,-71.3355,-276.1068,-293.0902,-277.4559,54.2176,-119.1285,-479.4361,-492.6072,8.3732,42.4988,-5.576,-198.6151,-357.0952,-331.5667,186.6195,317.3075,201.267,-37.1731,-278.3164,-467.7796,-163.3909,-117.305,-233.9266,277.7969,181.9723,178.8292,-168.7152,-436.041,171.345,369.0302,423.7144,434.0961,-428.1816,23.7334,-136.6735,-222.4486,180.8461,57.5968,129.2984,127.1866,-109.3928,-143.6253,-385.9948,127.9867,-8.8096,-239.844,66.6491,-50.7301,-309.1113,-474.6991,212.1767,-444.4596,-211.3601,351.3551,335.0507,-128.6226,-98.5249,-257.454,489.8014,-378.8622,311.0304,-4.9107,362.7586,-458.8825,373.2779,-103.29,-5.6216,122.0183,76.9731,17.8771,289.8893,-56.4338,375.9665,-83.9991,440.0823,142.2309,-471.0813,-59.4847,-400.4217,91.4892,374.4009,486.8697,414.5213,-0.35
35,-278.2345,-231.206,-238.479,389.3143,-276.9742,-33.9869,349.1201,127.3928,-410.7213,337.3789,36.4048,333.4291,-12.4075,483.8778,311.4489,-74.0628,-379.6051,463.234,157.5614,-140.9455,120.7926,-161.2341,194.162,-412.6181,-9.1258,-194.5065,441.1572,255.5455,-73.8086,-119.4013,-486.4792,-27.4352,98.9738,-119.002,-75.5589,261.7675,156.0993,89.6457,-190.6318,429.9325,195.9536,-172.6155,-22.7976,438.9412,-246.4661,447.7281,434.5346,405.8957,217.3324,392.6129,-158.604,15.8632,483.0414,334.7693,-307.2482,302.1267,-7.4125,3.8081,-405.7316,377.5069,51.2307,235.0695,269.737,-389.3487,186.4225,-36.8521,401.2051,-59.0378,-190.8023,-182.8076,-362.6136,-124.8064,362.4142,45.3344,-330.1214,-162.5452,-434.4411,219.1143,-374.1038,364.5639,-268.582,-22.9247,-73.8849,-54.5258,-23.0882,167.9233,-181.9807,-207.1173,300.2193,206.5903,-72.013,-244.4396,-435.5389,10.3523,-435.3545,-138.8392,449.8426,-244.8971,229.7666,267.5225,-401.6021,466.3278,418.3623,-317.8205,28.5192,384.5628,-79.6177,469.4532,-395.1986,-353.4477,-93.6914,70.3999,-441.0627,-201.1221,141.2748,433.3389,82.413,-394.0046,-438.6836,453.4704,-160.6535,353.0374,-238.0377,236.5195,497.9019,202.9472,-421.6417,-382.042,84.6308,430.1599,-390.9918,-195.0401,255.6526,-86.5964,-491.667,-199.1557,-102.7114,474.877,-292.9154,-77.3163,143.5625,58.8126,-284.8908,-457.6457,212.5317,480.4032,-324.0829,491.0165,-494.7934,267.4311,-142.2401,-368.9058,-370.4955,498.803,-6.7377,-395.373,177.8868,306.9761,80.4185,-239.1253,-435.1349,7.6298,-157.6242,348.6095,475.7845,317.7116,-353.7336,-40.2881,353.7096,-60.9783,-385.5816,243.8071,-398.8341,62.343,340.0251,-24.8105,-343.4186,189.6737,-467.3026,104.7127,159.5467,-482.5496,71.6951,-163.5304,-321.8438,185.2875,-331.6885,-102.6817,-242.7548,-259.4407,220.6898,231.6571,-297.1145,-186.9472,-316.9286,-36.2392,-293.964,296.3878,467.7409,-277.6389,493.2143,417.1244,12.241,-343.7893,-33.7207,457.2978,-248.9726,-409.5439,-92.4779,-173.7584,400.8483,59.7439,13.3265,-175.617,37.333,-307.6469,-82.3687,332.578,-412.0079,144.7037,350.6506,423.3235,-53.2147,67.9581,-447.3845,-461.0187,371.1702,386.2045,352.2722,-119.098,123.9178,-52.0535,465.2626,474.0272,402.9961,491.4763,-33.1373,-228.8607,-383.3299,408.8192,-275.155,489.8633,-349.5073,346.9781,129.3929,282.1868,-77.3384,277.3026,412.3277,263.6705,473.3756,-437.9988,114.1686,-452.3331,-167.8898,-193.6217,444.6168,-354.3223,-238.0967,432.0883,-349.7249,-42.3659,-304.7343,296.2192,-136.5386,-121.7774,450.4678,140.5384,-450.8993,93.8942,-54.4945,498.521,-461.7182,111.5166,-397.6007,-397.959,-20.9331,-19.7068,78.551,161.9472,-24.8682,-434.4537,102.9447,214.298,-494.3813,211.6782,64.8196,372.6962,-399.8337,114.5476,-191.0045,-369.6465,-391.7201,-204.9951,-201.7654,475.898,-262.3247,-348.6974,79.4062,-112.4281,-102.266,67.3008,335.485,68.4289,-433.9104,-392.963,-73.3788,276.5766,-105.2219,422.6201,192.915,-388.3541,242.3915,479.5633,42.5998,259.6189,-316.5861,390.1121,-216.0274,-373.296,103.7169,321.9107,19.0023,487.2627,151.6922,276.7424,461.6928,24.4758,133.263,-47.289,-413.9538,435.2414,-466.9724,-270.6602,238.9442,-110.5389,403.5151,-395.4393,-208.2219,-53.0773,-26.5792,-387.6534,-120.5566,143.2237,-305.3778,442.0665,417.9523,460.3337,254.8689,-375.9436,-101.0153,232.4727,-35.5285,-470.3007,-423.9161,-108.9997,-29.6555,233.1043,240.4766,404.763,276.8465,-354.4058,74.0678,-343.244,332.9786,361.2964,-322.0828,-41.1861,-122.8074,-299.5682,-481.218,-157.3994,310.6317,-261.176,310.2644,-239.9855,255.1004,-311.3351,437.9486,78.1311,-133.9261,-176.2119,45.9943,492.3169,266.5795,16.8553,-4
70.9413,-331.2718,218.4122,369.7118,-179.3201,-165.7277,-87.9832,357.6499,-261.0345,442.1609,113.2997,-112.5643,481.2426,-365.4958,400.5374,-395.085,303.8103,-292.0268,167.0744,-199.013,174.9283,498.3585,-337.466,303.9078,-326.0901,-331.7143,6.7189,-277.1371,-204.9097,-313.4259,-462.7296,437.8485,267.2872,157.752,143.8784,60.1304,-492.991,326.0132,-123.3415,390.8461,-293.0175,483.4759,240.4338,271.6879,483.4801,391.2687,238.3995,-246.607,-411.7722,-257.9864,238.0949,494.3455,-489.0838,-26.7283,317.1161,-264.0242,-16.6819,-141.4839,429.101,252.2336,-325.1541,471.044,452.352,7.4546,343.3004,-336.4424,489.6317,307.1831,-139.2075,153.572,-332.5617,-361.892,110.6459,-384.8117,-423.0834,-277.9929,44.5303,167.9458,364.1204,-222.5008,-148.7923,198.4694,-74.0043,-458.4327,-227.5346,272.4441,-477.2587,303.1998,72.3129,112.9422,-98.2577,296.903,-489.0569,-461.4503,-381.6239,-440.6212,-354.1834,356.1583,-220.6533,192.5295,-409.0818,-264.2973,498.2192,-306.675,-313.6103,-124.9266,-436.5922,297.9051,121.9351,425.3888,-283.9925,-360.441,-347.4517,8.6814,477.4163,-344.6926,-311.574,-199.9541,-272.862,-360.8642,-306.0856,-218.9529,200.1938,-187.9337,-149.341,-431.5156,-135.3958,131.1299,262.0532,-210.162,353.4392,-249.2969,216.4223,499.6139,215.8176,-346.1569,177.2202,-173.1132,-466.9007,-310.9848,463.485,6.516,-334.8823,-282.7409,-375.2367,-127.4937,257.2427,384.9285,206.4053,-283.9167,369.6312,-325.1146,452.7523,-103.9792,-51.036,153.325,-344.1749,289.4824,109.8308,375.2284,-249.8481,367.8478,71.0143,471.6136,-265.6336,12.9061,-470.1288,-113.547,38.8925,-205.7232,418.6063,475.6095,-18.8731,-431.5545,-288.6452,-406.8928,79.4828,-152.1474,345.565,-200.8038,174.7789,379.2991,-385.1188,-217.6888,241.9077,-449.1824,467.832,186.0095,-82.8376,-450.7827,-32.2903,-288.132,169.8581,-275.3198,-388.1222,-431.3601,64.9652,368.9351,107.4999,408.8666,267.7858,-462.4349,-198.4615,378.1182,252.7529,-344.883,-364.0161,-124.6144,-222.8902,-103.7114,387.1701,-363.7944,-237.934,230.2082,-63.1276,-456.8188,361.9248,461.0643,160.8127,305.6079,81.2236,-322.0002,-273.4727,-356.9758,227.4751,278.5386,-10.8627,49.6988,-495.2527,428.0901,393.6169,-360.5547,-137.0244,26.962,-326.3379,-399.4972,449.7645,-238.7444,-69.8461,222.6126,-68.7657,132.7567,255.7355,-190.3762,271.6129,405.5764,115.8834,0.9645,331.1665,396.4585,217.4435,-323.6914,39.5915,282.4489,411.3888,-219.2131,240.8913,-109.5264,-438.3067,-157.3961,-180.7485,-258.9153,61.7008,483.4718,-386.0406,-499.1824,-90.2675,-358.5152,-79.3051,-97.4094,-91.7246,63.539,-307.0526,226.416,-454.475,-375.7449,300.532,409.7526,7.7042,-320.297,-244.9896,-282.6645,-414.9866,-331.4623,316.162,348.8361,-342.8609,477.2374,6.5636,-483.931,341.3556,498.2318,-46.3428,203.981,101.2793,128.4547,-285.068,56.5149,-407.6478,-151.4672,116.6673,-115.0498,-491.7974,-151.9475,474.7827,-288.4179,286.4447,-430.6331,-279.1458,318.721,-276.8375,157.9586,-9.2346,398.8374,380.2256,61.1557,13.0746,-80.139,-134.8798,-37.6466,-209.7381,236.1511,388.5629,-196.1123,-481.5887,327.8334,408.2074,479.1439,85.082,227.7623,250.2644,-47.8238,464.8471,-431.5099,489.9794,452.9999,-50.8695,-429.0862,-138.8555,-395.3346,391.3405,-249.4682,-280.6761,-460.5297,1.0129,199.1008,-97.4134,-235.0172,-466.1287,-302.7993,298.4108,-22.478,173.9936,122.8033,-235.0353,231.5057,-97.2265,-203.8224,457.6806,484.1385,-309.3619,-168.3588,-177.2797,-3.9408,-279.2997,104.4862,-139.4921,-450.2539,402.541,-437.1151,-337.4914,-200.3446,-164.484,-293.7216,471.7414,192.6153,233.1926,-122.8377,356.5476,450.1361,-400.0941,61.0466,441.7145,189.7192,-69.63
48,252.5418,-246.5242,-344.0219,14.2904,87.2185,-119.2684,205.422,-374.4802,33.4042,81.2271,-2.5025,-138.6816,8.1989,-439.7698,-446.1887,-374.9012,160.9795,49.3705,72.7925,245.9454,-138.7558,11.9923,414.9421,5.9535,-142.9589,396.2571,-222.2068,-2.6172,-90.5871,346.7415,-337.3213,-372.4473,91.8271,310.6442,263.7468,-357.0433,-246.0827,25.4967,55.8069,-64.7183,-342.7375,-356.7083,70.0885,-79.026,-346.3906,206.2687,-440.6602,321.8775,223.3025,159.6939,292.4308,241.077,-219.0901,495.9946,0.3506,-166.4262,475.1836,-272.5527,118.8711,458.2456,353.3839,-82.5653,37.2834,-92.4387,146.5082,233.4743,-408.0537,-469.9263,148.8959,-324.352,498.608,-324.5319,-114.6779,-200.4192,404.8448,-289.7989,400.6151,-372.9065,359.7581,141.4237,-304.6837,314.3738,-302.4693,442.6138,-224.0818,270.1887,-477.1098,429.0239,264.1871,26.84,283.4518,129.5215,6.6673,-91.4464,75.821,261.5692,-403.0782,-213.9284,-356.8221,-232.4484,33.5696,99.1931,344.0097,187.4695,-264.0572,-199.6103,342.5485,187.058,31.5948,-275.4046,215.9846,425.1114,327.1992,437.8426,-281.2049,71.7953,393.346,-339.9023,-78.8502,314.1866,-120.7207,-416.0802,-327.1001,413.6143,-236.2051,247.1197,318.5011,-194.295,486.3421,409.0831,252.6212,-452.654,-215.7497,-464.1643,61.9033,66.4139,-425.8918,-401.3522,-395.1639,427.7052,-264.1728,131.9144,258.4416,-442.2357,68.3167,441.5518,138.4774,470.7538,-14.6434,-436.2225,385.0708,286.1155,323.9014,137.4596,-352.5503,1.9307,-314.7656,449.5639,-468.3008,81.2499,487.4562,270.1387,-445.3627,460.1174,-205.2539,-32.6044,359.0438,-115.5841,-268.6624,-495.8554,-474.4781,337.9834,-281.4488,252.1636,-33.645,-26.6636,193.8834,287.2377,6.9748,414.4343,-211.7143,-23.0035,-226.5275,-400.285,-336.3935,28.1908,244.27,21.9938,-222.3759,-103.1418,464.7943,-256.0156,46.7511,-487.2509,-321.3631,479.2142,328.166,-481.2039,253.4962,100.2875,-399.98,-81.5868,289.7597,-318.7266,-264.2078,129.4063,407.6828,222.8346,370.0391,46.9838,-356.4992,-305.9992,-258.4048,-410.7736,-245.9092,32.9185,-237.9085,-403.8853,12.0239,-164.6252,107.369,8.0379,-139.3796,365.9266,-448.5863,314.1141,-280.0686,-463.4747,2.6092,-376.8811,96.7462,242.419,-480.9968,345.3697,328.281,39.0387,-342.3026,469.0461,-103.9411,381.0458,-141.6771,-4.7988,289.4799,-55.0671,-292.4788,364.1267,-395.9876,-232.5859,-285.7012,-444.7762,79.5454,251.5539,359.3705,467.2154,273.1778,-373.8216,299.611,-464.32,-106.0638,491.2626,-39.3721,-110.1154,383.4063,45.0848,262.2361,-111.754,249.0826,-305.9751,22.9663,-120.4794,484.0797,151.9063,388.5088,105.9067,444.0361,-45.5696,243.9313,303.4003,-27.795,-7.2151,411.6561,-100.6193,-207.3277,-6.4576,-300.3722,118.2638,342.3654,66.7861,104.0615,180.5752,281.6788,-342.7549,-65.8778,140.9091,-169.8935,-437.2435,-392.4147,-348.2217,202.3684,440.4071,-276.2247,129.5096,-43.4059,-456.876,-445.1126,-193.8847,-156.3408,274.7116,-129.6168,-484.7027,214.0806,375.6649,444.5303,-71.8577,-474.5957,-342.2716,-322.7281,205.6087,-14.3469,-283.0586,-86.2198,-420.3924,182.3599,22.7485,452.8141,-286.5839,155.1115,-316.4854,-28.3824,56.4873,-146.001,378.2396,473.2566,380.2417,-399.6208,-347.9016,206.5985,-145.9688,-219.9708,-216.6865,404.4334,324.8516,55.3154,-119.4645,-79.2847,-191.5158,-136.3728,413.3355,356.7344,-437.7335,404.9099,-494.6143,135.9107,151.2158,-161.0672,451.0975,-93.0876,495.7659,321.2577,-451.6211,-311.9214,-432.4626,496.8637,382.6126,97.7431,245.2208,-462.5156,-274.939,116.6882,80.6219,315.5602,-342.4345,274.387,-418.7591,53.5711,-96.2339,271.8546,-46.8098,150.3864,206.6682,311.9593,174.7625,-198.5948,105.6143,212.7571,237.4211,-21.2842,-383.0439,
285.4973,-80.4955,105.5129,-158.8626,-156.2353,98.5192,-308.2654,-92.7883,45.686,-380.6921,140.1508,365.9526,108.1565,-140.4508,-246.5095,133.3693,-4.6582,-20.843,339.374,-99.2908,17.8824,242.8291,75.8953,-441.8762,-352.3943,-484.0549,-401.3674,321.6953,213.7102,261.1824,-41.5899,65.2736,-26.9977,152.9615,308.5357,-211.4979,477.2073,-414.7828,-330.2034,-123.7898,-261.1105,-328.6632,-15.1514,438.4531,-323.3771,-173.6672,-293.5578,459.1075,-18.34,-270.1311,-315.6445,348.4226,-435.2806,-419.9553,-106.1863,-283.0003,43.5508,-18.0891,224.808,406.4155,-163.6988,-129.2904,207.8322,474.5666,-60.1079,9.563,44.705,118.7999,-301.6795,-38.2161,410.4003,-190.4926,-430.6086,1.2693,312.7535,-455.5725,-271.7346,-159.4378,-227.9918,312.9331,166.2825,-31.7905,-227.9038,-421.644,296.5264,-335.4129,413.344,48.8782,217.3682,434.8719,-387.0484,170.5191,201.0157,127.1522,474.5561,-100.6847,-434.2549,29.5853,-467.6037,184.2936,116.9028,124.6507,-497.3002,-86.4991,59.6243,-104.9888,-294.6228,223.8354,-97.9298,64.2283,203.7397,186.3586,64.5045,122.1795,439.3753,464.9225,434.9882,85.5836,259.4985,70.5414,-117.1196,198.2037,-127.745,-200.2022,-386.0653,1.6688,272.3237,211.4442,445.0575,479.2069,-354.0842,-211.1788,160.3409,258.6131,-71.1154,-196.203,-95.1323,-398.3867,70.6868,15.5394,333.5079,187.8193,-393.7479,269.1152,-336.0885,339.4546,-147.6351,186.847,-126.4872,-108.1731,-70.3962,-389.0454,135.3408,-51.5671,4.6139,-3.1587,-274.941,-208.586,171.0845,-277.1015,-104.1653,-260.934,-310.5456,290.0738,-38.1867,-254.3353,31.6405,433.6526,86.9343,48.5563,137.4622,-34.6388,-1.5028,-452.3147,349.1007,-347.9019,70.4255,-201.5194,-430.2517,177.8199,-391.6226,20.1876,-287.8148,-190.1158,-356.0897,-319.7011,87.2696,-141.1962,-137.9268,-70.4841,95.4435,16.2261,191.5316,-214.8942,142.0224,209.0575,180.5105,26.1511,-497.0902,-186.2708,441.5505,-7.6379,23.9577,-401.2169,-339.3474,16.9572,269.8157,178.6692,299.5455,-367.3993,-413.7073,-96.9188,-472.0939,-327.975,129.6294,446.5669,-32.714,-120.6079,71.7334,190.4871,436.6714,110.0289,-108.4299,8.0033,-341.055,77.7304,-196.1335,-343.1391,-152.6897,-378.0097,-106.9584,395.4607,-98.6717,-131.0531,-140.8907,-185.3101,-68.8474,-478.2088,-18.3317,256.0313,-119.4212,334.7436,318.1335,-20.8287,-147.7622,118.1926,-218.2094,-478.7367,217.0914,219.1878,75.2151,231.5097,-410.8572,-46.2061,153.4654,264.0178,144.8928,-115.1857,-369.8591,126.6643,-122.1998,480.7727,-85.4362,134.3245,-34.403,124.6945,12.1795,-184.8116,390.6826,87.9712,367.0822,-233.2724,-245.9838,104.6339,-53.7753,-264.3381,50.9031,-122.0604,136.6276,465.3429,288.8934,5.7445,-325.7759,53.493,-441.8264,-271.3847,-371.3886,-272.7637,-102.4757,-358.4499,-143.2793,-64.6363,499.8284,-155.8017,-37.8801,63.5318,-377.6101,125.3457,57.231,49.3608,-245.5766,-47.9802,383.4127,-114.1047,-30.258,-479.6988,-194.4846,368.4079,466.1545,-26.7084,8.2433,74.9479,-155.4871,494.9634,-196.3082,-206.8022,423.2288,-494.5835,-291.7666,-204.8478,396.6,-418.9048,-130.0584,-137.5258,-440.7922,73.1423,-251.5694,356.1615,-34.088,-23.3318,43.2522,-297.3896,409.686,-305.5675,424.8321,-154.9096,181.7696,-87.5939,-151.7475,-319.3074,227.2369,-113.0086,-68.1299,368.0398,-20.3706,-296.0095,-269.9336,-250.5127,-56.5895,188.9818,82.7481,488.6398,-151.2088,11.8563,320.4209,316.3155,317.2716,-185.4569,128.2219,108.4381,-453.2648,-406.1359,-414.2863,36.6919,-160.1338,188.7767,364.4688,-13.3882,233.621,11.2764,-154.8894,424.1841,-128.4954,23.1408,183.1928,382.2918,-464.2506,234.1366,-447.21,-425.1161,66.1712,424.058,299.3596,372.7703,-162.3764,-37.8575,-468.5142,189.9036,17
2.0345,310.1368,-459.7659,-219.5317,-68.9306,211.4315,-408.8232,215.1716,-134.0617,367.326,385.2393,453.6431,-258.6041,194.9712,-266.8576,145.4018,-406.4884,119.3747,466.6835,-404.694,-480.8574,-3.1007,-48.0469,-70.915,-229.4956,-69.6999,-114.9404,372.8744,-247.5689,250.4333,252.9375,71.5672,323.3984,268.7582,16.7518,-258.5373,252.518,378.1721,-197.3271,-211.1179,444.2923,-152.2646,262.3183,159.3338,259.6788,271.9127,-26.8833,403.0915,-56.9197,-445.8869,-108.8167,417.8988,13.4232,-281.765,-405.8573,262.7831,-279.493,328.5591,-453.3941,-116.0368,435.4734,-439.0927,-332.9565,355.4955,324.9878,33.3519,-165.0182,188.1811,467.3455,185.1057,-233.8598,-17.6827,283.4271,-329.1247,-402.9721,404.7866,-358.7031,-267.4074,441.8363,320.2389,-128.0179,339.544,196.2018,-60.2688,336.0228,-440.1943,318.6882,-158.2596,277.0925,-487.4971,-338.9865,-275.716,136.8547,-253.6206,-40.2807,-357.0971,188.0344,-203.0674,449.9618,-223.2508,468.1441,302.4002,-65.0044,342.4431,205.6774,-118.636,-29.9706,183.9825,223.956,314.0691,137.0129,-8.0452,-15.131,-269.8643,-12.691,228.9777,-147.8384,-347.1117,-283.1905,459.2004,296.1321,-483.1799,414.3423,383.0187,-408.5525,-286.8169,482.5853,9.5232,-459.4968,-333.2521,109.0969,129.5107,43.4369,455.8283,-4.0423,-318.5019,339.1641,416.3581,-309.0429,84.2325,-355.8753,264.7671,43.8922,-298.6039,412.4413,19.4198,-251.279,-191.157,-478.2058,251.5709,-178.9633,479.293,188.399,380.9755,268.6575,120.3467,-322.0305,-255.4894,-377.515,56.9153,-133.9486,156.2546,-428.9581,-54.994,28.2146,158.7121,-426.7307,491.0086,-150.7205,-233.1005,244.5174,45.911,-406.1181,233.1636,175.9334,414.2805,421.7396,-322.8029,-252.2412,35.7622,318.5223,-141.5121,-375.4407,380.3081,222.1228,443.7844,367.377,-202.9594,-493.6231,-184.2242,-253.9838,463.1952,-416.3887,252.0867,-63.5317,411.0727,98.6261,330.7369,363.5685,-498.1848,-413.7246,-2.5996,-238.3547,-355.6041,-303.698,43.6266,383.1105,-72.3066,274.7491,321.9322,220.9543,-30.5578,400.0891,-181.7069,-386.4403,497.2206,-408.9611,138.485,-133.5666,-340.2569,-223.6313,270.884,-215.9399,74.3931,-244.1364,353.4219,-156.9905,488.3148,96.352,401.8525,-468.8344,129.9715,-27.1953,-168.631,187.7049,-336.5255,331.0652,204.3538,36.0182,366.8502,-468.6579,478.1409,-332.6136,-281.8499,63.7165,-458.8161,14.8894,-145.6397,267.1499,85.2025,326.3764,-419.6361,-133.9626,102.0618,443.3099,-207.9032,132.7032,234.001,-26.0754,105.6478,174.1252,-403.3511,-164.9714,-262.9344,-58.9668,357.6414,355.7508,-331.8443,153.5733,417.5712,260.7394,-150.1053,-435.6525,-364.1558,328.6183,-270.0863,107.1746,345.7998,480.8749,206.3896,-498.237,495.0835,481.9384,418.5571,-246.5213,-363.7304,311.7076,-53.1664,-297.3839,122.3105,-13.9226,-145.9754,-189.1748,460.9375,194.5417,-28.1346,-261.2177,-88.8396,-254.6407,-465.3148,-169.5377,24.3113,-116.2323,-420.3526,317.2107,-231.6227,-270.8239,387.8598,412.4251,428.1373,308.2044,275.2082,402.3663,-209.9843,-492.7269,225.1948,326.469,207.3557,-131.7677,371.9408,-139.3098,324.205,-126.6204,-335.0853,-248.2587,-344.907,307.2109,-441.3296,-318.027,414.6535,172.0537,-280.4991,331.0475,-158.0178,-285.1951,12.3632,149.9347,282.8302,-91.5624,-180.6097,496.0881,368.2567,357.6875,-194.2106,48.9213,-479.2956,-165.139,238.7811,302.7007,297.2805,208.7099,-5.5755,-85.7911,-358.1111,344.6131,415.7199,-219.1525,490.5003,-46.0096,498.2818,-91.8067,384.0104,396.1107,408.2827,-5.3919,-333.7992,-168.985,273.72,359.7125,227.7621,158.3406,-366.9722,3.7709,27.2728,71.9754,269.5792,-365.281,117.9152,-184.3682,356.9013,-142.6579,-496.7598,122.0194,89.1247,4.1914,-81.9905,465.084
1,115.4727,169.6116,-199.9951,-223.3149,-447.3022,11.831,320.2368,105.1316,344.2462,8.6333,62.2285,-70.3944,-284.6694,-482.4229,-448.1569,-237.7858,222.3921,-172.1386,-312.5756,-390.0565,398.951,119.9784,-419.6537,121.3186,481.3011,-181.6662,-56.0219,424.1359,7.1461,138.8567,-307.0606,334.066,254.0897,473.7227,45.5936,133.7268,49.5334,-283.3406,179.4466,105.6191,-30.4162,271.5774,6.1156,110.4732,286.4325,13.3431,494.0139,-371.7624,283.3652,272.0558,-302.343,122.7245,-463.9261,299.9807,282.4502,-262.4911,183.4289,222.7474,-229.5973,141.6188,262.5468,278.1155,-331.0891,-393.6027,-230.1461,201.6657,-93.3604,-395.8877,-125.2013,-222.973,368.3759,234.6628,-28.6809,-151.0703,432.0315,253.1214,430.7065,-143.6963,499.84,85.1683,280.4354,196.6013,139.0476,120.8148,-398.8155,-335.5504,229.0516,403.8604,-383.9868,-79.975,-152.77,220.4036,135.0355,238.2176,-242.3085,-177.0743,381.8202,411.167,378.0153,456.5976,364.013,24.2316,-395.4659,-210.2581,138.7539,479.7398,-291.7797,-123.0491,188.9817,42.8931,-354.4479,358.853,-43.6168,-190.6656,-103.3037,47.8915,-358.5402,374.9758,493.9951,-427.2376,-119.1142,-453.2975,-326.2696,-212.8273,-142.2931,-179.795,355.77,-156.2903,331.2006,451.9252,185.2944,-96.1941,173.0447,345.2744,43.0151,381.7845,-143.4125,84.654,-208.7053,-293.141,333.6349,-80.472,-376.9817,214.6298,-43.0931,-254.7834,-421.6961,-368.844,467.5544,-418.61,-66.6824,-350.2671,348.8241,252.3495,41.8677,-128.869,90.0391,-136.7405,-136.7822,489.8074,-396.8204,63.8355,323.9557,-83.6674,451.263,152.8955,-291.7497,410.0787,-299.7468,51.34,-298.6066,-58.853,325.911,-281.9541,-15.3457,299.1325,-347.4959,388.407,343.1096,28.1816,24.3013,-111.3312,190.5583,279.9848,-479.8894,123.2182,233.8425,-466.2128,-134.7122,217.8674,432.9523,-186.799,-477.2512,-223.5514,64.274,141.5251,-161.2187,150.2791,-228.1087,81.172,451.0879,-230.3818,-304.9398,402.1081,199.1266,275.3423,-123.9548,-21.1815,-384.544,446.9626,208.9692,-337.4827,-58.1011,344.2642,230.2868,44.9176,245.9885,-284.1875,-351.6104,108.1289,459.649,191.4334,53.591,136.7139,10.5912,-15.8411,62.8305,448.5256,194.7705,-356.3214,84.4996,-133.2502,-358.6308,262.7949,219.8741,-355.3985,468.2922,243.7227,-408.3166,188.6111,-221.7264,-286.8234,-340.3046,-224.5375,332.2615,73.2788,-24.7857,-485.2204,-136.7196,-162.9693,92.6017,-99.611,-186.5203,495.5483,240.8051,409.6493,-58.1321,-154.1239,-335.9719,-82.4408,-471.3057,-43.373,301.0884,-96.6359,-236.6906,435.7313,-227.7263,-406.8904,-392.3187,169.0043,-371.0852,-271.3652,-57.4466,-196.8455,52.741,361.7395,-117.8599,190.5339,276.6457,-321.9851,425.881,-473.2662,-74.2968,221.3612,-465.4429,181.723,-78.4508,21.6152,148.8107,-166.1687,-281.6391,-462.3636,-420.5255,-161.4143,98.8383,-374.5345,-366.2851,187.1506,-405.1865,239.4847,-246.8352,33.1748,-344.1211,477.9759,-294.1354,-359.5015,-44.8454,151.7072,-22.7324,-260.3293,99.1414,-20.5536,173.3766,-422.6692,458.3853,-199.7898,-236.3929,365.2599,-66.4191,388.3472,283.0336,-268.9463,269.5704,360.9679,-322.102,-407.0705,-93.0994,338.9108,-189.1359,-216.9102,-249.0153,122.6058,-254.8318,-112.2771,-279.0506,-168.4431,392.888,394.7607,468.0544,340.1852,-293.1288,-8.2912,-419.2608,323.3382,-93.8793,-242.0672,427.7716,-441.6906,128.3229,424.4679,-71.8586,134.5411,-74.5205,18.4141,17.7277,126.9123,-137.6119,33.3783,222.9912,-279.3582,89.1226,-90.031,12.7221,98.7767,-80.2372,-485.9212,-481.6575,-325.9729,318.8005,-433.786,-296.6337,421.6515,-27.2786,-445.2456,451.8876,-482.1014,-143.1098,186.1258,-90.2432,-297.7479,-351.0026,-423.7518,-219.6096,-269.2043,33.5767,-325.4335,392.4866,-41
8.243,112.5852,-248.1306,451.2154,-419.2995,154.5752,483.6323,-315.962,-196.872,406.1769,-356.9868,67.5251,-255.6475,103.5181,-450.4418,386.9518,456.4057,99.4591,-166.636,275.5374,200.4925,99.7623,292.6794,-422.3998,419.4837,-466.548,-462.8519,-381.4489,472.8356,-129.9563,441.4941,-376.1232,-114.1945,233.5531,313.6963,394.9503,-278.7558,350.7515,47.9427,220.7074,-178.9789,-346.0485,-128.5665,8.9461,159.9838,-57.3637,351.9478,-65.9411,-258.1788,498.9494,-472.613,-428.5678,17.3981,-435.3682,-421.155,-54.9177,-490.2348,178.3777,-31.9618,-242.1805,362.3736,380.8179,446.4272,-23.9142,61.3588,-489.5704,363.6446,-186.1519,-351.8684,-322.2791,-226.0431,404.6996,203.9824,306.0958,234.0145,-180.4996,452.0633,257.171,-83.6197,-393.152,396.6934,32.156,-428.7645,183.7886,494.767,68.3905,278.9785,-40.4759,261.7298,236.5778,4.5577,-130.9582,433.2837,-298.1139,-107.9822,-196.8446,-121.1765,-292.5509,-246.4546,-258.6038,280.1334,-52.6511,483.2928,-185.7577,-75.3705,351.3411,179.1282,-479.3838,166.2733,-197.9043,282.6848,-50.4744,-492.7178,183.6435,-127.2379,483.646,433.0805,-228.5488,139.8314,-145.1337,-403.1749,306.2704,122.7149,479.6928,85.3866,108.095,-224.152,494.6848,-368.4504,-180.7579,61.7136,51.2045,-383.0103,-376.4816,-292.8217,-201.118,332.1516,425.2758,138.1284,-229.4302,432.9081,2.9898,-437.7631,-448.2151,129.9126,-170.2405,499.0396,-48.2137,363.8046,-423.2511,-28.0804,-267.826,-356.6288,-99.9371,-409.8465,170.4902,-269.2584,-277.4098,300.8819,-142.5889,339.0952,16.2275,-310.8646,201.0733,-495.5905,341.9279,-149.1184,-494.4928,-81.7343,209.9762,273.4892,380.3163,359.2424,-242.5,-42.1268,-303.9792,11.6018,361.5483,416.4178,10.3282,195.9796,148.8096,-60.9724,-205.5221,-145.4574,-341.5913,426.8996,-19.5843,60.6265,-133.4191,-139.8737,281.7465,461.2854,-270.8902,61.0182,-58.6791,-254.0193,-234.1206,-208.7334,39.7498,-14.337,-68.2319,-342.2756,403.6834,401.6122,-166.1637,47.3592,-325.7,274.5459,343.4873,328.3783,-370.1657,-122.8967,-231.3182,122.6609,119.2685,-223.5437,-210.8076,116.5022,340.2814,256.1852,-217.3487,-150.9598,331.1343,-453.8182,-448.0842,-95.2475,-340.9942,-416.7835,-96.7226,-328.7212,-373.4337,472.2214,-484.522,-465.1583,330.0712,73.2052,-55.1266,-352.8984,341.0742,-230.4845,321.0752,236.2116,35.1902,75.3489,-469.4042,110.2036,35.1156,454.7224,103.0685,-221.7499,-23.6898,-259.2362,-110.509,-261.0039,219.2391,-139.9404,155.7723,377.9713,434.0318,-365.1397,459.1471,-318.5774,323.4256,194.325,-311.9529,-153.9019,-346.5811,76.4069,443.2121,-199.407,495.6636,-138.5213,-145.3432,-151.7758,-365.3547,263.6507,-491.1686,-183.5585,-12.6044,318.5346,-443.8639,-179.0338,477.9093,-355.5118,-423.0035,-229.1166,-96.7782,-479.2384,192.9085,223.3407,-302.9472,297.3847,477.584,-297.5958,168.6023,-80.6912,-89.8717,87.1476,-129.7807,346.5576,-253.9729,-399.6858,-389.5785,35.1648,-180.451,-49.6084,83.9582,-185.2329,97.283,195.5249,-91.6969,199.202,-449.792,333.4825,-113.7558,443.434,394.3587,-94.9074,71.2092,-251.1774,-85.047,-46.4004,20.2595,341.1073,-91.2527,86.3775,303.1247,-336.9011,343.9894,-384.1261,154.4411,-465.2493,-63.3249,488.0231,348.6725,458.2093,322.401,220.2532,283.3734,-386.4252,-256.5262,-87.2205,96.8199,47.6908,-399.6307,214.7716,-19.9177,-458.513,-194.3218,-320.5342,-275.857,-301.6955,-84.9038,358.3475,-88.9271,499.7721,-161.7403,355.4894,313.6211,-176.1703,61.8427,107.603,-176.063,-426.5408,292.3612,58.3331,-115.8853,471.4131,-76.4815,-309.6263,361.4518,192.4763,-145.7968,256.3888,133.335,-474.0901,-366.9793,-495.223,457.2366,170.056,285.0152,89.8213,225.2251,354.1822,-298.374,-332.91
64,-55.2409,306.9283,25.9392,218.0624,7.5085,-151.8768,-155.4932,6.0001,201.4506,-259.9874,485.1078,-362.8516,-230.1434,-398.2512,243.0012,32.302,-197.91,144.1195,-89.4196,-44.0399,-371.7866,227.6007,492.7526,499.3824,162.2475,279.0325,177.0781,341.0137,199.6009,108.1678,312.2319,-211.5001,-92.675,357.0513,-337.924,-348.984,-350.3677,173.3473,-193.7346,-318.5609,-2.0928,46.6287,-346.8513,36.634,-277.4949,-149.325,481.1378,370.3864,-139.6689,-332.2805,48.0292,109.8363,494.6994,373.6992,495.7442,400.4998,-26.2276,-308.7669,188.9497,257.9182,-116.6944,269.8932,197.005,123.1139,-356.2058,485.1982,-4.0119,397.8434,-204.67,-494.5133,-414.1299,142.1512,-36.5446,390.0718,6.9876,263.1216,457.5598,89.6086,-266.3804,17.3457,88.8182,236.6271,81.175,-170.2249,-5.7664,422.7852,180.3349,-135.2642,149.2285,-70.6607,-46.169,-389.3313,230.6125,388.4853,-438.3426,111.8034,300.0416,37.5604,-437.3868,-114.1336,312.7777,-99.1161,-312.9015,-147.3787,-434.0536,19.5034,141.706,-281.4504,-208.9608,281.4619,-361.0596,-464.2757,77.8205,232.5575,165.4104,424.8738,124.5555,342.038,86.7543,278.0216,311.2686,337.834,-90.0545,-210.1143,-488.4095,-80.7535,92.3731,-122.622,-288.0571,1.7285,-5.2998,100.0717,-395.0571,-477.5587,-160.5642,-119.4214,-232.233,415.7276,-204.3216,-436.7766,-103.4644,-427.0939,-31.0927,-440.2919,120.5971,-223.3623,-199.0988,304.8697,432.5731,-231.5791,-397.696,306.4134,330.1018,32.4345,-175.719,464.6091,-291.5686,300.1631,-167.4592,238.9574,104.5893,-187.2215,-294.0111,-361.9094,480.6847,-304.2133,-448.7144,67.7235,-255.9669,254.7379,464.5465,6.8909,-368.7554,337.5993,39.1928,-376.0625,433.4224,-109.1488,341.7731,377.843,446.839,-192.283,251.1592,437.6812,-478.3409 ] + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: test-too-big-still-float @@ -162,7 +139,9 @@ teardown: dims: 5 index: true similarity: cosine - + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: set-dense-vector @@ -343,7 +322,9 @@ teardown: properties: my_dense_vector_field: type: dense_vector - + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: test-mapped-index @@ -390,7 +371,9 @@ teardown: properties: my_child_dense_vector_field: type: dense_vector - + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: test-mapped-index @@ -546,7 +529,9 @@ teardown: my_float1: [ 159.1, 289.56, -128.7424, 145.9871, -164.0003, 86.4034, -89.6929, 257.9717, 131.6075, 67.9233, -144.8255, 223.8446, 77.3228, -210.1163, -139.4783, 12.6499, 15.4491, 108.3465, -189.3947, 178.2045, -187.5925, 184.5089, 77.3022, -202.7439, -13.4959, 115.9719, -139.4332, 196.7845, 104.7573, -156.7746, 166.9878, 68.3936, 159.8473, -141.4446, 21.1947, 186.5908, -209.6895, 68.6169, 44.1255, 147.4659, 56.5079, -179.7997, -85.1651, 11.4847, 124.1662, 96.2246, -178.6705, 85.5925, 205.3616, -16.4704, 172.4947, -115.2535, -58.1722, 94.4836, 34.6458, -70.1011, -58.8047, 149.9562, -37.8998, 196.9805, -169.3555, -163.9432, 188.5611, 214.8378, 29.3182, -24.8724, 152.9382, -109.4345, -123.6716, -8.2441, 64.5902, 27.8083, 40.8185, -94.3161, 58.1463, -138.7432, 24.6805, -88.7222, -11.2018, 206.6434, 201.9024, 87.3079, -3.2883, -60.2484, -109.5789, 105.5766, -116.6709, -17.7073, -71.5093, -75.2937, -176.8691, -146.4967, 53.7586, 199.5294, 55.9754, -48.7399, 82.2051, 135.2921, 22.4408, -116.4008, -33.7538, 29.7207, 6.3692, -97.5768, -12.7982, -200.9331, -62.2743, 81.0843, 136.2247, 150.2565, 139.6838, 155.2657, -25.7447, 198.5955, 18.8099, 46.9014, 
-60.2672, 136.4801, 171.8966, 172.5842, 13.9123, 75.8386, -64.2444, -48.1964, 135.9685, 7.4927, -40.6424, -76.8922 ] - + - do: + cluster.health: + wait_for_events: languid - do: indices.get_mapping: index: test-copyto-index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/150_runtime_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/150_runtime_fields.yml new file mode 100644 index 0000000000000..8cb2802c2ef95 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/150_runtime_fields.yml @@ -0,0 +1,60 @@ +--- +tsdb_execute_painless_api: + - skip: + version: " - 8.11.99" + reason: fixed in 8.12.0 and later + + - do: + indices.create: + index: test_index + body: + settings: + index: + mode: time_series + routing_path: [metricset, k8s.pod.uid] + mappings: + properties: + "@timestamp": + type: date + metricset: + type: keyword + time_series_dimension: true + k8s: + properties: + pod: + properties: + uid: + type: keyword + time_series_dimension: true + name: + type: keyword + ip: + type: ip + network: + properties: + tx: + type: long + rx: + type: long + + - do: + scripts_painless_execute: + body: + script: + source: "emit(doc['k8s.pod.network.tx'].value < 1000);" + context: "boolean_field" + context_setup: + index: test_index + document: + "@timestamp": "2021-04-28T18:51:03.142Z" + metricset: pod + k8s: + pod: + name: dog + uid: df3145b3-0563-4d3b-a0f7-897eb2876ea9 + ip: 10.10.55.3 + network: + tx: 111434595272 + rx: 430605511 + + - match: { result: [false] } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 3d297e2181970..7edae8f264c76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -574,37 +574,6 @@ source include/exclude: type: keyword time_series_dimension: true ---- -Unsupported metric type position: - - skip: - version: "all, - 8.0.99, 8.8.0 -" - reason: AwaitsFix https://github.com/elastic/elasticsearch/issues/94239, index.mode and routing_path introduced in 8.1.0 and time series metric position introduced in 8.8.0 - - - do: - catch: '/unknown parameter \[time_series_metric\] on mapper \[location\] of type \[geo_point\]/' - indices.create: - index: test_position - body: - settings: - number_of_shards: 1 - number_of_replicas: 0 - index: - mode: time_series - routing_path: [metricset] - time_series: - start_time: 2021-04-28T00:00:00Z - end_time: 2021-04-29T00:00:00Z - mappings: - properties: - "@timestamp": - type: date - metricset: - type: keyword - time_series_dimension: true - location: - type: geo_point - time_series_metric: position - --- Supported metric type position: - skip: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/RejectionActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/RejectionActionIT.java index a98f98ded28b4..c7082f7979ed9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/RejectionActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/RejectionActionIT.java @@ -50,8 +50,7 @@ public void testSimulatedSearchRejectionLoad() throws Throwable { final CountDownLatch latch = new CountDownLatch(numberOfAsyncOps); final CopyOnWriteArrayList responses = new CopyOnWriteArrayList<>(); for (int i = 0; i < numberOfAsyncOps; i++) { - 
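A note on the dense_vector test hunks above: each inserted cluster.health step with wait_for_events: languid makes the YAML test wait until the master has processed every pending cluster-state task, down to the lowest (LANGUID) priority, before the following indices.get_mapping call, so the assertion cannot race the mapping update it depends on. A minimal Java sketch of the equivalent wait in an ESIntegTestCase-style test (illustrative only, not part of this change):

import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.common.Priority;

// Equivalent of the YAML step: block until even LANGUID-priority pending
// cluster-state tasks have drained, then verify the wait did not time out.
private void waitForPendingClusterStateTasks() {
    ClusterHealthResponse health = clusterAdmin().prepareHealth()
        .setWaitForEvents(Priority.LANGUID)
        .get();
    assertFalse("timed out waiting for cluster-state tasks", health.isTimedOut());
}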
client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field", "1")) .execute(new LatchedActionListener<>(new ActionListener() { @Override diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 9d745994fd658..6a3a7ccfe221a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -107,8 +107,7 @@ public void onFailure(Exception e) { ensureSearchable(); while (latch.getCount() > 0) { assertHitCount( - client().prepareSearch() - .setQuery(matchAllQuery()) + prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().must(matchAllQuery()) .mustNot(boolQuery().must(termQuery("field1", "value1")).must(termQuery("field1", "value2"))) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index ba1a8b7919963..1c358fe06b68f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -1259,7 +1259,6 @@ private void prepareIndex( indicesAdmin().prepareCreate("idx") .setSettings(indexSettings(numPrimaries, numReplicas).put(settings)) .setWaitForActiveShards(activeShardCount) - .get() ); if (activeShardCount != ActiveShardCount.NONE) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index c0a54ef874de5..dee70b8fa3ca9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -50,6 +50,7 @@ import org.elasticsearch.tasks.TaskResult; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.test.tasks.MockTaskManagerListener; import org.elasticsearch.test.transport.MockTransportService; @@ -522,7 +523,6 @@ public void testTasksCancellation() throws Exception { assertEquals(0, clusterAdmin().prepareListTasks().setActions(TEST_TASK_ACTION.name() + "*").get().getTasks().size()); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/95325") public void testTasksUnblocking() throws Exception { // Start blocking test task TestTaskPlugin.NodesRequest request = new TestTaskPlugin.NodesRequest("test"); @@ -543,6 +543,10 @@ public void testTasksUnblocking() throws Exception { ); } + @TestLogging( + reason = "https://github.com/elastic/elasticsearch/issues/97923", + value = "org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction:TRACE" + ) public void testListTasksWaitForCompletion() throws Exception { waitForCompletionTestCase( randomBoolean(), @@ -782,15 +786,15 @@ public void 
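The Java hunks here begin a refactor that recurs through the rest of this diff: client().prepareSearch(...) becomes a bare prepareSearch(...). The delegate itself is not shown in the patch; presumably it is a convenience on the shared integration-test base class, along these lines (a sketch under that assumption):

import org.elasticsearch.action.search.SearchRequestBuilder;

// Hypothetical helper on the test base class: builds the same request as the
// old client().prepareSearch(indices) spelling, just shorter at the call site.
protected SearchRequestBuilder prepareSearch(String... indices) {
    return client().prepareSearch(indices);
}

Call sites such as client().prepareSearch("test").setQuery(...) then shrink to prepareSearch("test").setQuery(...) with identical behaviour.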
testTaskStoringSuccessfulResult() throws Exception { assertNoFailures(indicesAdmin().prepareRefresh(TaskResultsService.TASK_INDEX).get()); - SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX) - .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.action()))) - .get(); + SearchResponse searchResponse = prepareSearch(TaskResultsService.TASK_INDEX).setSource( + SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.action())) + ).get(); assertEquals(1L, searchResponse.getHits().getTotalHits().value); - searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX) - .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.taskId().getNodeId()))) - .get(); + searchResponse = prepareSearch(TaskResultsService.TASK_INDEX).setSource( + SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.taskId().getNodeId())) + ).get(); assertEquals(1L, searchResponse.getHits().getTotalHits().value); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java index b7dadaaff3d01..8750389480071 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateActionDisruptionIT.java @@ -193,12 +193,9 @@ public void runRepeatedlyWhileChangingMaster(Runnable runnable) throws Exception assertingThread.start(); updatingThread.start(); - final MockTransportService masterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - masterName - ); + final var masterTransportService = MockTransportService.getInstance(masterName); - for (MockTransportService mockTransportService : mockTransportServices) { + for (final var mockTransportService : mockTransportServices) { if (masterTransportService != mockTransportService) { masterTransportService.addFailToSendNoConnectRule(mockTransportService); mockTransportService.addFailToSendNoConnectRule(masterTransportService); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java index 127d399eab04a..e5edeccbad55d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/AutoCreateSystemIndexIT.java @@ -169,7 +169,6 @@ private String autoCreateSystemAliasViaV1Template(String indexName) throws Excep indicesAdmin().preparePutTemplate("test-template") .setPatterns(List.of(indexName + "*")) .addAlias(new Alias(indexName + "-legacy-alias")) - .get() ); String nonPrimaryIndex = indexName + "-2"; @@ -222,7 +221,7 @@ private String autoCreateSystemAliasViaComposableTemplate(String indexName) thro client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("test-composable-template").indexTemplate(cit) - ).get() + ) ); String nonPrimaryIndex = indexName + "-2"; @@ -246,7 +245,7 @@ public void 
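TransportClusterStateActionDisruptionIT above swaps the explicit cast of internalCluster().getInstance(TransportService.class, masterName) for MockTransportService.getInstance(masterName); IndexDiskUsageAnalyzerIT and PointInTimeIT below make the same change. The helper's definition sits outside this patch; plausibly it wraps exactly the cast it replaces (a sketch, not the actual source):

import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.TransportService;

// Assumed shape of the static accessor: look up the node's TransportService
// in the internal test cluster and cast it to the mock subclass.
public static MockTransportService getInstance(String nodeName) {
    return (MockTransportService) ESIntegTestCase.internalCluster()
        .getInstance(TransportService.class, nodeName);
}

The IndexDiskUsageAnalyzerIT hunk further below uses the same helper both to install addRequestHandlingBehavior(...) rules and to clearAllRules() in its finally block.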
testAutoCreateSystemAliasViaComposableTemplate() throws Exception { client().execute( DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("test-composable-template") - ).get() + ) ); } @@ -269,7 +268,7 @@ public void testAutoCreateSystemAliasViaComposableTemplateAllowsTemplates() thro client().execute( DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("test-composable-template") - ).get() + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CloneIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CloneIndexIT.java index 4f84509086a5e..93d12c686297f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CloneIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CloneIndexIT.java @@ -64,7 +64,6 @@ public void testCreateCloneIndex() { .setSettings( Settings.builder().put("index.number_of_replicas", createWithReplicas ? 1 : 0).putNull("index.blocks.write").build() ) - .get() ); ensureGreen(); assertNoResizeSourceIndexSettings("target"); @@ -80,21 +79,21 @@ public void testCreateCloneIndex() { } final int size = docs > 0 ? 2 * docs : 1; - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); if (createWithReplicas == false) { // bump replicas setReplicaCount(1, "target"); ensureGreen(); - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); } for (int i = docs; i < 2 * docs; i++) { client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); - assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); + assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get(); assertThat( target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index 27154c883d270..e3ea54f382c0a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -270,11 +270,10 @@ public void onFailure(Exception e) { // we only really assert that we never reuse segments of old indices or anything like this here and that nothing fails with // crazy exceptions - SearchResponse expected = client().prepareSearch("test") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + SearchResponse expected = 
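A second recurring cleanup, visible in AutoCreateSystemIndexIT above and in most files below, drops the trailing .get() and hands assertAcked the unresolved request builder or ActionFuture directly. The assertion helper is not included in the patch; overloads along these lines would explain the call sites (a sketch, not the actual ElasticsearchAssertions source):

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.support.master.AcknowledgedResponse;

// Resolve the request inside the assertion, then check the ack flag once.
public static void assertAcked(ActionRequestBuilder<?, ? extends AcknowledgedResponse> builder) {
    assertAcked(builder.get());
}

public static void assertAcked(ActionFuture<? extends AcknowledgedResponse> future) {
    assertAcked(future.actionGet());
}

public static void assertAcked(AcknowledgedResponse response) {
    assertTrue("request was not acknowledged", response.isAcknowledged());
}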
prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)) .get(); - SearchResponse all = client().prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get(); + SearchResponse all = prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).get(); assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); logger.info("total: {}", expected.getHits().getTotalHits().value); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java index f5a2121b2dde9..a0dffa8b7caa8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateSystemIndicesIT.java @@ -66,9 +66,7 @@ public void beforeEach() { @After public void afterEach() throws Exception { assertAcked(indicesAdmin().prepareDeleteTemplate("*").get()); - assertAcked( - client().execute(DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("*")).get() - ); + assertAcked(client().execute(DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("*"))); } @Override @@ -160,7 +158,6 @@ private void createSystemAliasViaV1Template(String indexName, String primaryInde indicesAdmin().preparePutTemplate("test-template") .setPatterns(List.of(indexName + "*")) .addAlias(new Alias(indexName + "-legacy-alias")) - .get() ); assertAcked(prepareCreate(primaryIndexName)); @@ -213,7 +210,7 @@ private void createIndexWithComposableTemplates(String indexName, String primary client().execute( PutComposableIndexTemplateAction.INSTANCE, new PutComposableIndexTemplateAction.Request("test-composable-template").indexTemplate(cit) - ).get() + ) ); assertAcked(prepareCreate(primaryIndexName)); @@ -232,7 +229,7 @@ public void testCreateSystemAliasViaComposableTemplate() throws Exception { client().execute( DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("test-composable-template") - ).get() + ) ); } @@ -259,7 +256,7 @@ public void testCreateSystemAliasViaComposableTemplateWithAllowsTemplates() thro client().execute( DeleteComposableIndexTemplateAction.INSTANCE, new DeleteComposableIndexTemplateAction.Request("test-composable-template") - ).get() + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 173719a8dc92e..b0ec5de81984a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -104,10 +104,9 @@ public void testCreateShrinkIndexToN() { assertAcked( indicesAdmin().prepareResizeIndex("source", "first_shrink") .setSettings(indexSettings(shardSplits[1], 0).putNull("index.blocks.write").build()) - .get() ); ensureGreen(); - assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + 
assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); for (int i = 0; i < 20; i++) { // now update client().prepareIndex("first_shrink") @@ -116,8 +115,8 @@ public void testCreateShrinkIndexToN() { .get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); - assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); // relocate all shards to one node such that we can merge it. updateIndexSettings( @@ -131,17 +130,16 @@ public void testCreateShrinkIndexToN() { .setSettings( indexSettings(shardSplits[2], 0).putNull("index.blocks.write").putNull("index.routing.allocation.require._name").build() ) - .get() ); ensureGreen(); - assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); // let it be allocated anywhere and bump replicas updateIndexSettings( Settings.builder().putNull("index.routing.allocation.include._id").put("index.number_of_replicas", 1), "second_shrink" ); ensureGreen(); - assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); for (int i = 0; i < 20; i++) { // now update client().prepareIndex("second_shrink") @@ -150,9 +148,9 @@ public void testCreateShrinkIndexToN() { .get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); - assertHitCount(client().prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); - assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("second_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("first_shrink").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); assertNoResizeSourceIndexSettings("first_shrink"); assertNoResizeSourceIndexSettings("second_shrink"); @@ -272,7 +270,6 @@ public void testCreateShrinkIndex() { .putNull("index.routing.allocation.require._name") .build() ) - .get() ); ensureGreen(); @@ -311,21 +308,21 @@ public void testCreateShrinkIndex() { } final int size = docs > 0 ? 
2 * docs : 1; - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); if (createWithReplicas == false) { // bump replicas setReplicaCount(1, "target"); ensureGreen(); - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); } for (int i = docs; i < 2 * docs; i++) { client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); - assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); + assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get(); assertThat( target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId), @@ -409,7 +406,7 @@ public void testCreateShrinkIndexFails() throws Exception { // we support the expected shard size in the allocator to sum up over the source index shards assertTrue("expected shard size must be set but wasn't: " + expectedShardSize, expectedShardSize > 0); ensureGreen(); - assertHitCount(client().prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); + assertHitCount(prepareSearch("target").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), 20); assertNoResizeSourceIndexSettings("target"); } @@ -462,9 +459,7 @@ public void testCreateShrinkWithIndexSort() throws Exception { // check that the index sort order of `source` is correctly applied to the `target` assertAcked( - indicesAdmin().prepareResizeIndex("source", "target") - .setSettings(indexSettings(2, 0).putNull("index.blocks.write").build()) - .get() + indicesAdmin().prepareResizeIndex("source", "target").setSettings(indexSettings(2, 0).putNull("index.blocks.write").build()) ); ensureGreen(); assertNoResizeSourceIndexSettings("target"); @@ -513,7 +508,6 @@ public void testShrinkCommitsMergeOnIdle() throws Exception { assertAcked( indicesAdmin().prepareResizeIndex("source", "target") .setSettings(Settings.builder().put("index.number_of_replicas", 0).build()) - .get() ); ensureGreen(); assertNoResizeSourceIndexSettings("target"); @@ -586,7 +580,6 @@ public void testShrinkThenSplitWithFailedNode() throws Exception { ).build() ) .setResizeType(ResizeType.SHRINK) - .get() ); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index 84fe73ead7d8a..54add487a3dd4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -186,10 +186,9 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha indicesAdmin().prepareResizeIndex("source", 
"first_split") .setResizeType(ResizeType.SPLIT) .setSettings(firstSplitSettingsBuilder.build()) - .get() ); ensureGreen(); - assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); assertNoResizeSourceIndexSettings("first_split"); for (int i = 0; i < numDocs; i++) { // now update @@ -200,8 +199,8 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha builder.get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); - assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); for (int i = 0; i < numDocs; i++) { GetResponse getResponse = client().prepareGet("first_split", Integer.toString(i)).setRouting(routingValue[i]).get(); assertTrue(getResponse.isExists()); @@ -214,16 +213,15 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha indicesAdmin().prepareResizeIndex("first_split", "second_split") .setResizeType(ResizeType.SPLIT) .setSettings(indexSettings(secondSplitShards, 0).putNull("index.blocks.write").build()) - .get() ); ensureGreen(); - assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); assertNoResizeSourceIndexSettings("second_split"); // let it be allocated anywhere and bump replicas setReplicaCount(1, "second_split"); ensureGreen(); - assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); for (int i = 0; i < numDocs; i++) { // now update IndexRequestBuilder builder = indexFunc.apply("second_split", i); @@ -237,30 +235,24 @@ private void splitToN(int sourceShards, int firstSplitShards, int secondSplitSha GetResponse getResponse = client().prepareGet("second_split", Integer.toString(i)).setRouting(routingValue[i]).get(); assertTrue(getResponse.isExists()); } - assertHitCount(client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); - assertHitCount(client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); - assertHitCount(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); + assertHitCount(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), numDocs); if (useNested) { assertNested("source", numDocs); assertNested("first_split", numDocs); assertNested("second_split", numDocs); } - assertAllUniqueDocs( - client().prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", 
"bar")).get(), - numDocs - ); - assertAllUniqueDocs( - client().prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), - numDocs - ); - assertAllUniqueDocs(client().prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); + assertAllUniqueDocs(prepareSearch("second_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); + assertAllUniqueDocs(prepareSearch("first_split").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); + assertAllUniqueDocs(prepareSearch("source").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")).get(), numDocs); } public void assertNested(String index, int numDocs) { // now, do a nested query - SearchResponse searchResponse = client().prepareSearch(index) - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) - .get(); + SearchResponse searchResponse = prepareSearch(index).setQuery( + nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg) + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)); } @@ -373,7 +365,6 @@ public void testCreateSplitIndex() throws Exception { indicesAdmin().prepareResizeIndex("source", "target") .setResizeType(ResizeType.SPLIT) .setSettings(indexSettings(2, createWithReplicas ? 1 : 0).putNull("index.blocks.write").build()) - .get() ); ensureGreen(); assertNoResizeSourceIndexSettings("target"); @@ -410,21 +401,21 @@ public void testCreateSplitIndex() throws Exception { } final int size = docs > 0 ? 2 * docs : 1; - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); if (createWithReplicas == false) { // bump replicas setReplicaCount(1, "target"); ensureGreen(); - assertHitCount(client().prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); } for (int i = docs; i < 2 * docs; i++) { client().prepareIndex("target").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", XContentType.JSON).get(); } flushAndRefresh(); - assertHitCount(client().prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); - assertHitCount(client().prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); + assertHitCount(prepareSearch("target").setSize(2 * size).setQuery(new TermsQueryBuilder("foo", "bar")), 2 * docs); + assertHitCount(prepareSearch("source").setSize(size).setQuery(new TermsQueryBuilder("foo", "bar")), docs); GetSettingsResponse target = indicesAdmin().prepareGetSettings("target").get(); assertThat( target.getIndexToSettings().get("target").getAsVersionId("index.version.created", IndexVersion::fromId), @@ -484,7 +475,6 @@ public void testCreateSplitWithIndexSort() throws Exception { indicesAdmin().prepareResizeIndex("source", "target") .setResizeType(ResizeType.SPLIT) .setSettings(indexSettings(4, 0).putNull("index.blocks.write").build()) - .get() ); ensureGreen(); flushAndRefresh(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java index b44c49982e31b..dc5cc49092f7a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexBlocksIT.java @@ -48,20 +48,17 @@ public void testDeleteIndexOnIndexReadOnlyAllowDeleteSetting() { refresh(); try { updateIndexSettings(Settings.builder().put(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE, true), "test"); - assertSearchHits(client().prepareSearch(), "1"); + assertSearchHits(prepareSearch(), "1"); assertBlocked( client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); - assertSearchHits(client().prepareSearch(), "1"); + assertSearchHits(prepareSearch(), "1"); assertAcked(indicesAdmin().prepareDelete("test")); } finally { Settings settings = Settings.builder().putNull(IndexMetadata.SETTING_READ_ONLY_ALLOW_DELETE).build(); assertAcked( - indicesAdmin().prepareUpdateSettings("test") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setSettings(settings) - .get() + indicesAdmin().prepareUpdateSettings("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setSettings(settings) ); } } @@ -92,7 +89,7 @@ public void testDeleteIndexOnClusterReadOnlyAllowDeleteSetting() { refresh(); try { updateClusterSettings(Settings.builder().put(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), true)); - assertSearchHits(client().prepareSearch(), "1"); + assertSearchHits(prepareSearch(), "1"); assertBlocked( client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), Metadata.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK @@ -101,7 +98,7 @@ public void testDeleteIndexOnClusterReadOnlyAllowDeleteSetting() { indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", 2)), Metadata.CLUSTER_READ_ONLY_ALLOW_DELETE_BLOCK ); - assertSearchHits(client().prepareSearch(), "1"); + assertSearchHits(prepareSearch(), "1"); assertAcked(indicesAdmin().prepareDelete("test")); } finally { updateClusterSettings(Settings.builder().putNull(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java index 09b72a3c50f3b..94c08bd7e8162 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.junit.Before; @@ -249,23 +248,23 @@ public void testFailingTargetShards() throws Exception { final AtomicInteger successfulShards = new AtomicInteger(); try { for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - 
transportService.addRequestHandlingBehavior(AnalyzeIndexDiskUsageAction.NAME + "[s]", (handler, request, channel, task) -> { - AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; - IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); - logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); - ShardId shardId = shardRequest.shardId(); - if (failingShards.contains(shardId)) { - IndexShard indexShard = indicesService.getShardOrNull(shardId); - assertNotNull("No shard found for shard " + shardId, indexShard); - logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); - indexShard.close("test", randomBoolean()); - failedShards.incrementAndGet(); - } else { - successfulShards.incrementAndGet(); - } - handler.messageReceived(request, channel, task); - }); + MockTransportService.getInstance(node) + .addRequestHandlingBehavior(AnalyzeIndexDiskUsageAction.NAME + "[s]", (handler, request, channel, task) -> { + AnalyzeDiskUsageShardRequest shardRequest = (AnalyzeDiskUsageShardRequest) request; + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node); + logger.info("--> handling shard request {} on node {}", shardRequest.shardId(), node); + ShardId shardId = shardRequest.shardId(); + if (failingShards.contains(shardId)) { + IndexShard indexShard = indicesService.getShardOrNull(shardId); + assertNotNull("No shard found for shard " + shardId, indexShard); + logger.info("--> failing shard {} on node {}", shardRequest.shardId(), node); + indexShard.close("test", randomBoolean()); + failedShards.incrementAndGet(); + } else { + successfulShards.incrementAndGet(); + } + handler.messageReceived(request, channel, task); + }); } AnalyzeIndexDiskUsageResponse resp = client().execute( AnalyzeIndexDiskUsageAction.INSTANCE, @@ -279,8 +278,7 @@ public void testFailingTargetShards() throws Exception { assertThat(resp.getShardFailures(), arrayWithSize(failedShards.get())); } finally { for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.clearAllRules(); + MockTransportService.getInstance(node).clearAllRules(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 682883af044a1..d7e4e42b73554 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -525,7 +525,7 @@ public void testRolloverMaxPrimaryShardSize() throws Exception { public void testRolloverMaxPrimaryShardDocs() throws Exception { assertAcked( - prepareCreate("test-1").setSettings(Settings.builder().put("index.number_of_shards", 1)).addAlias(new Alias("test_alias")).get() + prepareCreate("test-1").setSettings(Settings.builder().put("index.number_of_shards", 1)).addAlias(new Alias("test_alias")) ); int numDocs = randomIntBetween(10, 20); for (int i = 0; i < numDocs; i++) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java index 96e3939312870..224db253675d2 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkIntegrationIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; @@ -145,11 +144,7 @@ private void createSamplePipeline(String pipelineId) throws IOException, Executi .endArray() .endObject(); - AcknowledgedResponse acknowledgedResponse = clusterAdmin().putPipeline( - new PutPipelineRequest(pipelineId, BytesReference.bytes(pipeline), XContentType.JSON) - ).get(); - - assertTrue(acknowledgedResponse.isAcknowledged()); + assertAcked(clusterAdmin().putPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(pipeline), XContentType.JSON))); } /** This test ensures that index deletion makes indexing fail quickly, not wait on the index that has disappeared */ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 33fcaaea39853..18a8ae2dd2800 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -136,7 +136,7 @@ public void afterBulk(long executionId, BulkRequest request, Exception failure) indicesAdmin().refresh(new RefreshRequest()).get(); - SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get(); + SearchResponse results = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get(); assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 44b7040ba3267..e664f6e6bb42f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -131,7 +131,7 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) indicesAdmin().refresh(new RefreshRequest()).get(); - SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get(); + SearchResponse results = prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get(); if (rejectedExecutionExpected) { assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java index 7009ba7858dd7..9433f93d91f58 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkWithUpdatesIT.java @@ -514,7 +514,7 @@ public void testBulkIndexingWhileInitializing() throws Exception { refresh(); - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); } public void testFailingVersionedUpdatedOnBulk() throws Exception { @@ -634,7 +634,7 @@ public void testThatMissingIndexDoesNotAbortFullBulkRequest() throws Exception { .setRefreshPolicy(RefreshPolicy.IMMEDIATE); client().bulk(bulkRequest).get(); - assertHitCount(client().prepareSearch("bulkindex*"), 3); + assertHitCount(prepareSearch("bulkindex*"), 3); assertBusy(() -> assertAcked(indicesAdmin().prepareClose("bulkindex2"))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index f0c6359876aab..6ec01c3be5626 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -42,7 +42,7 @@ public void testIndexWithWriteDelayEnabled() throws Exception { for (int j = 0; j < numOfChecks; j++) { try { logger.debug("running search"); - SearchResponse response = client().prepareSearch("test").get(); + SearchResponse response = prepareSearch("test").get(); if (response.getHits().getTotalHits().value != numOfDocs) { final String message = "Count is " + response.getHits().getTotalHits().value diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java index 78fd60579d5d2..7871a14264944 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/LookupRuntimeFieldIT.java @@ -132,8 +132,7 @@ public void populateIndex() throws Exception { } public void testBasic() { - SearchResponse searchResponse = client().prepareSearch("books") - .addFetchField("author") + SearchResponse searchResponse = prepareSearch("books").addFetchField("author") .addFetchField("title") .addSort("published_date", SortOrder.DESC) .setSize(3) @@ -169,18 +168,17 @@ public void testBasic() { } public void testLookupMultipleIndices() throws IOException { - SearchResponse searchResponse = client().prepareSearch("books") - .setRuntimeMappings(parseMapping(""" - { - "publisher": { - "type": "lookup", - "target_index": "publishers", - "input_field": "publisher_id", - "target_field": "_id", - "fetch_fields": ["name", "city"] - } + SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" + { + "publisher": { + "type": "lookup", + "target_index": "publishers", + "input_field": "publisher_id", + "target_field": "_id", + "fetch_fields": ["name", "city"] } - """)) + } + """)) .setFetchSource(false) .addFetchField("title") .addFetchField("author") @@ -217,7 +215,7 @@ public void testLookupMultipleIndices() throws IOException { } public void testFetchField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("books").setRuntimeMappings(parseMapping(""" + SearchResponse searchResponse = prepareSearch("books").setRuntimeMappings(parseMapping(""" { "author": { "type": "lookup", diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 09e0c064685a4..bb7658f5011e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -37,7 +37,6 @@ import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import java.util.Collection; import java.util.HashSet; @@ -83,7 +82,7 @@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); - SearchResponse resp1 = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertThat(resp1.pointInTimeId(), equalTo(pitId)); assertHitCount(resp1, numDocs); int deletedDocs = 0; @@ -96,13 +95,12 @@ public void testBasic() { } refresh("test"); if (randomBoolean()) { - SearchResponse resp2 = client().prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); + SearchResponse resp2 = prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); assertNoFailures(resp2); assertHitCount(resp2, numDocs - deletedDocs); } try { - SearchResponse resp3 = client().prepareSearch() - .setPreference(null) + SearchResponse resp3 = prepareSearch().setPreference(null) .setQuery(new MatchAllQueryBuilder()) .setPointInTime(new PointInTimeBuilder(pitId)) .get(); @@ -128,7 +126,7 @@ public void testMultipleIndices() { refresh(); String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); @@ -140,11 +138,11 @@ public void testMultipleIndices() { client().prepareIndex(index).setId(id).setSource("value", i).get(); } refresh(); - resp = client().prepareSearch().get(); + resp = prepareSearch().get(); assertNoFailures(resp); assertHitCount(resp, numDocs + moreDocs); - resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); @@ -165,7 +163,7 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -185,7 +183,7 @@ public void testRelocation() throws Exception { } refresh(); } - resp = client().prepareSearch().setPreference(null).setPointInTime(new 
PointInTimeBuilder(pitId)).get(); + resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -198,7 +196,7 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -216,7 +214,7 @@ public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - SearchResponse resp1 = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); + SearchResponse resp1 = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); assertNoFailures(resp1); assertHitCount(resp1, index1); if (rarely()) { @@ -229,7 +227,7 @@ public void testPointInTimeNotFound() throws Exception { } SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() + () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() ); for (ShardSearchFailure failure : e.shardFailures()) { assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); @@ -254,30 +252,25 @@ public void testIndexNotFound() { refresh(); String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get(); assertNoFailures(resp); assertHitCount(resp, index1 + index2); indicesAdmin().prepareDelete("index-1").get(); if (randomBoolean()) { - resp = client().prepareSearch("index-*").get(); + resp = prepareSearch("index-*").get(); assertNoFailures(resp); assertHitCount(resp, index2); } // Allow partial search result - resp = client().prepareSearch() - .setPreference(null) - .setAllowPartialSearchResults(true) - .setPointInTime(new PointInTimeBuilder(pit)) - .get(); + resp = prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)).get(); assertFailures(resp); assertHitCount(resp, index2); // Do not allow partial search result expectThrows( ElasticsearchException.class, - () -> client().prepareSearch() - .setPreference(null) + () -> prepareSearch().setPreference(null) .setAllowPartialSearchResults(false) .setPointInTime(new PointInTimeBuilder(pit)) .get() @@ -313,8 +306,7 @@ public void testCanMatch() throws Exception { } } client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); - SearchResponse resp = client().prepareSearch() - .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + SearchResponse resp = prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference(null) 
.setPreFilterShardSize(randomIntBetween(2, 3)) @@ -373,14 +365,13 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs1 + numDocs2); assertThat(resp.pointInTimeId(), equalTo(pitId)); internalCluster().restartNode(assignedNodeForIndex1); - resp = client().prepareSearch() - .setPreference(null) + resp = prepareSearch().setPreference(null) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)) .get(); @@ -457,7 +448,7 @@ public void testOpenPITConcurrentShardRequests() throws Exception { .build() ) ); - var transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, dataNode.getName()); + final var transportService = MockTransportService.getInstance(dataNode.getName()); try { CountDownLatch sentLatch = new CountDownLatch(maxConcurrentRequests); CountDownLatch readyLatch = new CountDownLatch(1); @@ -491,7 +482,7 @@ public void testOpenPITConcurrentShardRequests() throws Exception { @SuppressWarnings({ "rawtypes", "unchecked" }) private void assertPagination(PointInTimeBuilder pit, int expectedNumDocs, int size, SortBuilder... sorts) throws Exception { Set seen = new HashSet<>(); - SearchRequestBuilder builder = client().prepareSearch().setSize(size).setPointInTime(pit); + SearchRequestBuilder builder = prepareSearch().setSize(size).setPointInTime(pit); for (SortBuilder sort : sorts) { builder.addSort(sort); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index e44a5a6a48181..d84d4270af24c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -70,8 +70,10 @@ import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -167,7 +169,7 @@ public void testLocalClusterAlias() { } } - public void testAbsoluteStartMillis() { + public void testAbsoluteStartMillis() throws ExecutionException, InterruptedException { TaskId parentTaskId = new TaskId("node", randomNonNegativeLong()); { IndexRequest indexRequest = new IndexRequest("test-1970.01.01"); @@ -186,9 +188,7 @@ public void testAbsoluteStartMillis() { assertEquals(RestStatus.CREATED, indexResponse.status()); } { - SearchRequest searchRequest = new SearchRequest(); - SearchResponse searchResponse = client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertHitCount(client().search(new SearchRequest()), 2); } { SearchRequest searchRequest = new SearchRequest(""); @@ -205,8 +205,7 @@ public void testAbsoluteStartMillis() { 0, randomBoolean() ); - SearchResponse searchResponse = 
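The PointInTimeIT hunks above share one lifecycle: open a point in time over the target indices, search it with setPreference(null) (presumably clearing the randomized preference the test framework injects, since a PIT search pins shard copies itself), and release it afterwards. Condensed into one sketch using the suite's own openPointInTime helper; the closing call is assumed rather than shown in this excerpt:

String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2));
try {
    // Searches through the PIT see the index as of the moment it was opened,
    // even if documents are indexed, deleted, or relocated afterwards.
    SearchResponse resp = prepareSearch().setPreference(null)
        .setPointInTime(new PointInTimeBuilder(pitId))
        .get();
    assertNoFailures(resp);
} finally {
    closePointInTime(pitId); // assumed cleanup helper, not shown in this excerpt
}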
client().search(searchRequest).actionGet(); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertHitCount(client().search(searchRequest), 2); } { SearchRequest searchRequest = SearchRequest.subSearchRequest( @@ -310,12 +309,11 @@ public void testWaitForRefreshIndexValidation() throws Exception { Arrays.fill(validCheckpoints, SequenceNumbers.UNASSIGNED_SEQ_NO); // no exception - client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get(); + prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", validCheckpoints)).get(); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("testFailedAlias") - .setWaitForCheckpoints(Collections.singletonMap("testFailedAlias", validCheckpoints)) + () -> prepareSearch("testFailedAlias").setWaitForCheckpoints(Collections.singletonMap("testFailedAlias", validCheckpoints)) .get() ); assertThat( @@ -328,7 +326,7 @@ public void testWaitForRefreshIndexValidation() throws Exception { IllegalArgumentException e2 = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("test1").setWaitForCheckpoints(Collections.singletonMap("test1", new long[2])).get() + () -> prepareSearch("test1").setWaitForCheckpoints(Collections.singletonMap("test1", new long[2])).get() ); assertThat( e2.getMessage(), @@ -342,7 +340,7 @@ public void testWaitForRefreshIndexValidation() throws Exception { IllegalArgumentException e3 = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", new long[2])).get() + () -> prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("testAlias", new long[2])).get() ); assertThat( e3.getMessage(), @@ -356,7 +354,7 @@ public void testWaitForRefreshIndexValidation() throws Exception { IllegalArgumentException e4 = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("test2", validCheckpoints)).get() + () -> prepareSearch("testAlias").setWaitForCheckpoints(Collections.singletonMap("test2", validCheckpoints)).get() ); assertThat( e4.getMessage(), @@ -375,11 +373,11 @@ public void testShardCountLimit() throws Exception { assertAcked(prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numPrimaries2))); // no exception - client().prepareSearch("test1").get(); + prepareSearch("test1").get(); updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1 - 1)); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test1").get()); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1").get()); assertThat( e.getMessage(), containsString("Trying to query " + numPrimaries1 + " shards, which is over the limit of " + (numPrimaries1 - 1)) @@ -388,9 +386,9 @@ public void testShardCountLimit() throws Exception { updateClusterSettings(Settings.builder().put(TransportSearchAction.SHARD_COUNT_LIMIT_SETTING.getKey(), numPrimaries1)); // no exception - client().prepareSearch("test1").get(); + prepareSearch("test1").get(); - e = expectThrows(IllegalArgumentException.class, () -> client().prepareSearch("test1", "test2").get()); + e = expectThrows(IllegalArgumentException.class, () -> prepareSearch("test1", 
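The testAbsoluteStartMillis hunk above now declares throws ExecutionException, InterruptedException because the newly imported assertHitCount resolves an ActionFuture<SearchResponse> itself, complementing the builder-accepting form used throughout this diff. Both presumably reduce to a response-level check, roughly as follows (assumed, not quoted from ElasticsearchAssertions):

import java.util.concurrent.ExecutionException;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

// Builder form: run the search, then compare total hits.
public static void assertHitCount(SearchRequestBuilder builder, long expectedHitCount) {
    assertHitCount(builder.get(), expectedHitCount);
}

// Future form: resolving the future is what surfaces the checked exceptions
// on callers such as testAbsoluteStartMillis.
public static void assertHitCount(ActionFuture<SearchResponse> future, long expectedHitCount)
    throws ExecutionException, InterruptedException {
    assertHitCount(future.get(), expectedHitCount);
}

public static void assertHitCount(SearchResponse response, long expectedHitCount) {
    assertEquals(expectedHitCount, response.getHits().getTotalHits().value);
}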
"test2").get()); assertThat( e.getMessage(), containsString( @@ -425,8 +423,7 @@ public void testSearchIdle() throws Exception { client().prepareIndex("test").setId("2").setSource("created_date", "2020-01-02").get(); client().prepareIndex("test").setId("3").setSource("created_date", "2020-01-03").get(); assertBusy(() -> { - SearchResponse resp = client().prepareSearch("test") - .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + SearchResponse resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)) .get(); assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); @@ -442,8 +439,7 @@ public void testCircuitBreakerReduceFail() throws Exception { final CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3)); - SearchRequest request = client().prepareSearch("test") - .addAggregation(new TestAggregationBuilder("test")) + SearchRequest request = prepareSearch("test").addAggregation(new TestAggregationBuilder("test")) .setBatchedReduceSize(batchReduceSize) .request(); final int index = i; @@ -484,8 +480,7 @@ public void onFailure(Exception e) { final CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3)); - SearchRequest request = client().prepareSearch("test") - .addAggregation(new TestAggregationBuilder("test")) + SearchRequest request = prepareSearch("test").addAggregation(new TestAggregationBuilder("test")) .setBatchedReduceSize(batchReduceSize) .request(); final int index = i; @@ -522,8 +517,7 @@ public void testCircuitBreakerFetchFail() throws Exception { final CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { int batchReduceSize = randomIntBetween(2, Math.max(numShards + 1, 3)); - SearchRequest request = client().prepareSearch("boom") - .setBatchedReduceSize(batchReduceSize) + SearchRequest request = prepareSearch("boom").setBatchedReduceSize(batchReduceSize) .setAllowPartialSearchResults(false) .request(); final int index = i; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java index b5ca2de799f92..a377ba9eb94ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/ActiveShardsObserverIT.java @@ -55,8 +55,7 @@ public void testCreateIndexNoActiveShardsNoWaiting() throws Exception { settingsBuilder.put("index.routing.allocation.exclude._name", exclude); } Settings settings = settingsBuilder.build(); - CreateIndexResponse response = prepareCreate("test-idx").setSettings(settings).setWaitForActiveShards(ActiveShardCount.NONE).get(); - assertTrue(response.isAcknowledged()); + assertAcked(prepareCreate("test-idx").setSettings(settings).setWaitForActiveShards(ActiveShardCount.NONE)); } public void testCreateIndexNotEnoughActiveShardsTimesOut() throws Exception { @@ -86,9 +85,7 @@ public void testCreateIndexEnoughActiveShards() throws Exception { .put(INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), internalCluster().numDataNodes() + randomIntBetween(0, 3)) .build(); assertAcked( - prepareCreate(indexName).setSettings(settings) - 
.setWaitForActiveShards(randomIntBetween(0, internalCluster().numDataNodes())) - .get() + prepareCreate(indexName).setSettings(settings).setWaitForActiveShards(randomIntBetween(0, internalCluster().numDataNodes())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java index 9e50d57f5eb99..837c55e81b471 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/master/IndexingMasterFailoverIT.java @@ -97,7 +97,7 @@ public void run() { ensureGreen("myindex"); refresh(); - assertThat(client().prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L)); + assertThat(prepareSearch("myindex").get().getHits().getTotalHits().value, equalTo(10L)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java index 73dcce55aa2a0..05a39c02808ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/support/replication/TransportReplicationActionRetryOnClosedNodeIT.java @@ -205,13 +205,9 @@ public void testRetryOnStoppedTransportService() throws Exception { assertTrue(primaryTestPlugin.actionRunningLatch.await(10, TimeUnit.SECONDS)); - MockTransportService primaryTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - primary - ); // we pause node after TransportService has moved to stopped, but before closing connections, since if connections are closed // we would not hit the transport service closed case. 
- primaryTransportService.addOnStopListener(() -> { + MockTransportService.getInstance(primary).addOnStopListener(() -> { primaryTestPlugin.actionWaitLatch.countDown(); safeAwait(doneLatch); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 2155192414246..c1ca4c60f176e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -64,7 +64,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.emptyArray; @@ -269,68 +269,65 @@ public void testSearchingFilteringAliasesSingleIndex() throws Exception { ).actionGet(); logger.info("--> checking single filtering alias search"); - SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get(); + SearchResponse searchResponse = prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1"); logger.info("--> checking single filtering alias wildcard search"); - searchResponse = client().prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("fo*").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1"); - searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3"); logger.info("--> checking single filtering alias search with sort"); - searchResponse = client().prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC).get(); + searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchAllQuery()).addSort("_index", SortOrder.ASC).get(); assertHits(searchResponse.getHits(), "1", "2", "3"); logger.info("--> checking single filtering alias search with global facets"); - searchResponse = client().prepareSearch("tests") - .setQuery(QueryBuilders.matchQuery("name", "bar")) + searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar")) .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("test").field("name"))) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); Global global = searchResponse.getAggregations().get("global"); Terms terms = global.getAggregations().get("test"); assertThat(terms.getBuckets().size(), equalTo(4)); logger.info("--> checking single filtering alias search with global facets and sort"); - searchResponse = client().prepareSearch("tests") - .setQuery(QueryBuilders.matchQuery("name", "bar")) + searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar")) 
.addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("test").field("name"))) .addSort("_index", SortOrder.ASC) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); global = searchResponse.getAggregations().get("global"); terms = global.getAggregations().get("test"); assertThat(terms.getBuckets().size(), equalTo(4)); logger.info("--> checking single filtering alias search with non-global facets"); - searchResponse = client().prepareSearch("tests") - .setQuery(QueryBuilders.matchQuery("name", "bar")) + searchResponse = prepareSearch("tests").setQuery(QueryBuilders.matchQuery("name", "bar")) .addAggregation(AggregationBuilders.terms("test").field("name")) .addSort("_index", SortOrder.ASC) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); terms = searchResponse.getAggregations().get("test"); assertThat(terms.getBuckets().size(), equalTo(2)); - searchResponse = client().prepareSearch("foos", "bars").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("foos", "bars").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2"); logger.info("--> checking single non-filtering alias search"); - searchResponse = client().prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4"); logger.info("--> checking non-filtering alias and filtering alias search"); - searchResponse = client().prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("alias1", "foos").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4"); logger.info("--> checking index and filtering alias search"); - searchResponse = client().prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("test", "foos").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4"); logger.info("--> checking index and alias wildcard search"); - searchResponse = client().prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("te*", "fo*").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4"); } @@ -376,61 +373,50 @@ public void testSearchingFilteringAliasesTwoIndices() throws Exception { refresh(); logger.info("--> checking filtering alias for two indices"); - SearchResponse searchResponse = client().prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get(); + SearchResponse searchResponse = prepareSearch("foos").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "5"); assertThat( - client().prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, + prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(2L) ); logger.info("--> checking filtering alias for one index"); - searchResponse = client().prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("bars").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "2"); assertThat( - 
client().prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, + prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L) ); logger.info("--> checking filtering alias for two indices and one complete index"); - searchResponse = client().prepareSearch("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("foos", "test1").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5"); assertThat( - client().prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, + prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(5L) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); - searchResponse = client().prepareSearch("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("foos", "aliasToTest1").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3", "4", "5"); assertThat( - client().prepareSearch("foos", "aliasToTest1") - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, + prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(5L) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); - searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)); assertThat( - client().prepareSearch("foos", "aliasToTests") - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, + prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(8L) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); - searchResponse = client().prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get(); + searchResponse = prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.termQuery("name", "something")).get(); assertHits(searchResponse.getHits(), "4", "8"); assertThat( - client().prepareSearch("foos", "aliasToTests") - .setSize(0) + prepareSearch("foos", "aliasToTests").setSize(0) .setQuery(QueryBuilders.termQuery("name", "something")) .get() .getHits() @@ -491,47 +477,31 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { refresh(); logger.info("--> checking filtering alias for multiple indices"); - SearchResponse searchResponse = client().prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get(); + SearchResponse searchResponse = prepareSearch("filter23", "filter13").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "21", "31", "13", "33"); assertThat( - client().prepareSearch("filter23", "filter13") - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, + prepareSearch("filter23", 
"filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(4L) ); - searchResponse = client().prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("filter23", "filter1").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "21", "31", "11", "12", "13"); assertThat( - client().prepareSearch("filter23", "filter1") - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, + prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(5L) ); - searchResponse = client().prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("filter13", "filter1").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "11", "12", "13", "33"); assertThat( - client().prepareSearch("filter13", "filter1") - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .get() - .getHits() - .getTotalHits().value, + prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(4L) ); - searchResponse = client().prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("filter13", "filter1", "filter23").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "11", "12", "13", "21", "31", "33"); assertThat( - client().prepareSearch("filter13", "filter1", "filter23") - .setSize(0) + prepareSearch("filter13", "filter1", "filter23").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .get() .getHits() @@ -539,11 +509,10 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { equalTo(6L) ); - searchResponse = client().prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("filter23", "filter13", "test2").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "21", "22", "23", "31", "13", "33"); assertThat( - client().prepareSearch("filter23", "filter13", "test2") - .setSize(0) + prepareSearch("filter23", "filter13", "test2").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .get() .getHits() @@ -551,11 +520,10 @@ public void testSearchingFilteringAliasesMultipleIndices() throws Exception { equalTo(6L) ); - searchResponse = client().prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("filter23", "filter13", "test1", "test2").setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "11", "12", "13", "21", "22", "23", "31", "33"); assertThat( - client().prepareSearch("filter23", "filter13", "test1", "test2") - .setSize(0) + prepareSearch("filter23", "filter13", "test1", "test2").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .get() .getHits() @@ -614,7 +582,7 @@ public void testDeletingByQueryFilteringAliases() throws Exception { logger.info("--> checking counts before delete"); assertThat( - client().prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, + prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()).get().getHits().getTotalHits().value, equalTo(1L) ); } @@ -1141,8 +1109,8 @@ public void 
testAliasFilterWithNowInRangeFilterAndQuery() { client().prepareIndex("my-index").setSource("timestamp", "2016-12-12").get(); if (i % 2 == 0) { refresh(); - assertHitCount(client().prepareSearch("filter1"), i); - assertHitCount(client().prepareSearch("filter2"), i); + assertHitCount(prepareSearch("filter1"), i); + assertHitCount(prepareSearch("filter2"), i); } } } @@ -1239,7 +1207,7 @@ public void testRemoveIndexAndReplaceWithAlias() throws InterruptedException, Ex "test_2", () -> assertAcked(indicesAdmin().prepareAliases().addAlias("test_2", "test").removeIndex("test")) ); - assertHitCount(client().prepareSearch("test"), 1); + assertHitCount(prepareSearch("test"), 1); } public void testHiddenAliasesMustBeConsistent() { @@ -1331,22 +1299,21 @@ public void testIndexingAndQueryingHiddenAliases() throws Exception { refresh(writeIndex, nonWriteIndex); // Make sure that the doc written to the alias made it - SearchResponse searchResponse = client().prepareSearch(writeIndex).setQuery(QueryBuilders.matchAllQuery()).get(); + SearchResponse searchResponse = prepareSearch(writeIndex).setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "2", "3"); // Ensure that all docs can be gotten through the alias - searchResponse = client().prepareSearch(alias).setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch(alias).setQuery(QueryBuilders.matchAllQuery()).get(); assertHits(searchResponse.getHits(), "1", "2", "3"); // And querying using a wildcard with indices options set to expand hidden - searchResponse = client().prepareSearch("alias*") - .setQuery(QueryBuilders.matchAllQuery()) + searchResponse = prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()) .setIndicesOptions(IndicesOptions.fromOptions(false, false, true, false, true, true, true, false, false)) .get(); assertHits(searchResponse.getHits(), "1", "2", "3"); // And that querying the alias with a wildcard and no expand options fails - searchResponse = client().prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("alias*").setQuery(QueryBuilders.matchAllQuery()).get(); assertThat(searchResponse.getHits().getHits(), emptyArray()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java index 91253b57f8526..9b78bb9369fd7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -265,7 +265,7 @@ public void testAddIndexBlock() throws Exception { } indicesAdmin().prepareRefresh(indexName).get(); - assertHitCount(client().prepareSearch(indexName).setSize(0), nbDocs); + assertHitCount(prepareSearch(indexName).setSize(0), nbDocs); } public void testSameBlockTwice() throws Exception { @@ -390,7 +390,7 @@ public void testAddBlockWhileIndexingDocuments() throws Exception { disableIndexBlock(indexName, block); } refresh(indexName); - assertHitCount(client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs); + assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs); } public void testAddBlockWhileDeletingIndices() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index 1542c2880683b..5e8e6c634fa47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -42,7 +42,7 @@ public void testBroadcastOperations() throws IOException { // check count for (int i = 0; i < 5; i++) { // test successful - SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); + SearchResponse countResponse = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/client/internal/node/NodeClientIT.java b/server/src/internalClusterTest/java/org/elasticsearch/client/internal/node/NodeClientIT.java deleted file mode 100644 index c260b873d5ad9..0000000000000 --- a/server/src/internalClusterTest/java/org/elasticsearch/client/internal/node/NodeClientIT.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.internal.node; - -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; - -import static org.hamcrest.Matchers.is; - -@ClusterScope(scope = Scope.SUITE) -public class NodeClientIT extends ESIntegTestCase { - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put(Client.CLIENT_TYPE_SETTING_S.getKey(), "anything") - .build(); - } - - public void testThatClientTypeSettingCannotBeChanged() { - for (Settings settings : internalCluster().getInstances(Settings.class)) { - assertThat(Client.CLIENT_TYPE_SETTING_S.get(settings), is("node")); - } - } -} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java index 3846711764ec9..48ba897ebb76c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterInfoServiceIT.java @@ -240,10 +240,7 @@ public void testClusterInfoServiceInformationClearOnError() { ); } - MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - internalTestCluster.getMasterName() - ); + final var masterTransportService = MockTransportService.getInstance(internalTestCluster.getMasterName()); final AtomicBoolean timeout = new AtomicBoolean(false); final Set blockedActions = newHashSet( @@ -254,7 +251,7 @@ public void testClusterInfoServiceInformationClearOnError() { ); // drop all outgoing 
stats requests to force a timeout. for (DiscoveryNode node : internalTestCluster.clusterService().state().getNodes()) { - mockTransportService.addSendBehavior( + masterTransportService.addSendBehavior( internalTestCluster.getInstance(TransportService.class, node.getName()), (connection, requestId, action, request, options) -> { if (blockedActions.contains(action)) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java index c0bd061e3e963..c437f2b5a4c8c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/MinimumMasterNodesIT.java @@ -107,13 +107,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we get the data back"); for (int i = 0; i < 10; i++) { assertThat( - client().prepareSearch() - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(100L) ); } @@ -161,7 +155,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we get the data back after cluster reform"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } logger.info("--> clearing voting config exclusions"); @@ -208,7 +202,7 @@ public void testTwoNodesNoMasterBlock() throws Exception { logger.info("--> verify we the data back"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } } @@ -261,7 +255,7 @@ public void testThreeNodesNoMasterBlock() throws Exception { refresh(); logger.info("--> verify we get the data back"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } List nonMasterNodes = new ArrayList<>( @@ -290,7 +284,7 @@ public void testThreeNodesNoMasterBlock() throws Exception { logger.info("--> verify we the data back"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); + assertHitCount(prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()), 100); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java index f620cd4697715..11b3027c23550 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/PrevalidateNodeRemovalIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; -import org.elasticsearch.transport.TransportService; import java.util.Arrays; import java.util.Collection; @@ -131,16 +130,15 @@ public void 
testNodeRemovalFromRedClusterWithLocalShardCopy() throws Exception { // its ACTION_SHARD_EXISTS requests since after a relocation, the source first waits // until the shard exists somewhere else, then it removes it locally. final CountDownLatch shardActiveRequestSent = new CountDownLatch(1); - MockTransportService node1transport = (MockTransportService) internalCluster().getInstance(TransportService.class, node1); - TransportService node2transport = internalCluster().getInstance(TransportService.class, node2); - node1transport.addSendBehavior(node2transport, (connection, requestId, action, request, options) -> { - if (action.equals(IndicesStore.ACTION_SHARD_EXISTS)) { - shardActiveRequestSent.countDown(); - logger.info("prevent shard active request from being sent"); - throw new ConnectTransportException(connection.getNode(), "DISCONNECT: simulated"); - } - connection.sendRequest(requestId, action, request, options); - }); + MockTransportService.getInstance(node1) + .addSendBehavior(MockTransportService.getInstance(node2), (connection, requestId, action, request, options) -> { + if (action.equals(IndicesStore.ACTION_SHARD_EXISTS)) { + shardActiveRequestSent.countDown(); + logger.info("prevent shard active request from being sent"); + throw new ConnectTransportException(connection.getNode(), "DISCONNECT: simulated"); + } + connection.sendRequest(requestId, action, request, options); + }); logger.info("--> move shard from {} to {}, and wait for relocation to finish", node1, node2); updateIndexSettings(Settings.builder().put("index.routing.allocation.require._name", node2), indexName); shardActiveRequestSent.await(); @@ -179,13 +177,10 @@ public void testNodeRemovalFromRedClusterWithTimeout() throws Exception { // make it red! internalCluster().stopNode(node1); ensureRed(indexName); - MockTransportService node2TransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node2); - node2TransportService.addRequestHandlingBehavior( - TransportPrevalidateShardPathAction.ACTION_NAME + "[n]", - (handler, request, channel, task) -> { + MockTransportService.getInstance(node2) + .addRequestHandlingBehavior(TransportPrevalidateShardPathAction.ACTION_NAME + "[n]", (handler, request, channel, task) -> { logger.info("drop the check shards request"); - } - ); + }); PrevalidateNodeRemovalRequest req = PrevalidateNodeRemovalRequest.builder() .setNames(node2) .build() diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java index 5731163def260..f4457a7db8b7c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/SimpleClusterStateIT.java @@ -15,11 +15,9 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.RoutingTable; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; @@ -28,20 +26,11 @@ import org.elasticsearch.common.io.stream.StreamInput; 
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -55,7 +44,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -256,7 +244,6 @@ public void testLargeClusterStatePublishing() throws Exception { ) .setMapping(mapping) .setTimeout("60s") - .get() ); ensureGreen(); // wait for green state, so its both green, and there are no more pending events MappingMetadata masterMappingMetadata = indicesAdmin().prepareGetMappings("test").get().getMappings().get("test"); @@ -403,22 +390,9 @@ public List<NamedWriteableRegistry.Entry> getNamedWriteables() { private final AtomicBoolean installed = new AtomicBoolean(); @Override - public Collection<Object> createComponents( - final Client client, - final ClusterService clusterService, - final ThreadPool threadPool, - final ResourceWatcherService resourceWatcherService, - final ScriptService scriptService, - final NamedXContentRegistry xContentRegistry, - final Environment environment, - final NodeEnvironment nodeEnvironment, - final NamedWriteableRegistry namedWriteableRegistry, - final IndexNameExpressionResolver expressionResolver, - final Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { + ClusterService clusterService = services.clusterService(); + + clusterService.addListener(event -> { final ClusterState state = event.state(); if (state.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java index b91fbb1c9b79f..f2fb19825371f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/allocation/FilteringAllocationIT.java @@ -52,13 +52,7 @@ public void testDecommissionNodeNoReplicas() { } indicesAdmin().prepareRefresh().execute().actionGet(); assertThat( - client().prepareSearch() - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(100L) ); @@ 
-89,13 +83,7 @@ public void testDecommissionNodeNoReplicas() { indicesAdmin().prepareRefresh().execute().actionGet(); assertThat( - client().prepareSearch() - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(100L) ); } @@ -151,13 +139,7 @@ public void testDisablingAllocationFiltering() { } indicesAdmin().prepareRefresh().execute().actionGet(); assertThat( - client().prepareSearch() - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(100L) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java index df6e6dada3934..3a2c6b5ebd0f7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/RareClusterStateIT.java @@ -213,7 +213,7 @@ public void testDeleteCreateInOneBulk() throws Exception { long dataClusterStateVersion = internalCluster().clusterService(dataNode).state().version(); assertThat(masterClusterStateVersion, equalTo(dataClusterStateVersion)); }); - assertHitCount(client().prepareSearch("test"), 0); + assertHitCount(prepareSearch("test"), 0); } public void testDelayedMappingPropagationOnPrimary() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java index 33e67520b82b6..dee6ac3859b15 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/VotingConfigurationIT.java @@ -92,10 +92,7 @@ public void testElectsNodeNotInVotingConfiguration() throws Exception { if (sender.equals(excludedNodeName)) { continue; } - final MockTransportService senderTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - sender - ); + final var senderTransportService = MockTransportService.getInstance(sender); for (final String receiver : nodeNames) { senderTransportService.addSendBehavior( internalCluster().getInstance(TransportService.class, receiver), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java index aff04ca521844..8bbdaf7388699 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceIT.java @@ -10,30 +10,16 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; 
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; import java.util.function.UnaryOperator; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -65,41 +51,11 @@ public TestPlugin(Settings settings) { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - clusterService.getClusterSettings().addSettingsUpdateConsumer(UPDATE_TEMPLATE_DUMMY_SETTING, integer -> { + public Collection<?> createComponents(PluginServices services) { + services.clusterService().getClusterSettings().addSettingsUpdateConsumer(UPDATE_TEMPLATE_DUMMY_SETTING, integer -> { logger.debug("the template dummy setting was updated to {}", integer); }); - return super.createComponents( - client, - clusterService, - threadPool, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment, - namedWriteableRegistry, - expressionResolver, - repositoriesServiceSupplier, - telemetryProvider, - allocationService, - indicesService - ); + return super.createComponents(services); } @Override @@ -128,13 +84,9 @@ public void testTemplateUpdate() throws Exception { assertTemplates(); // Change some templates - assertAcked(indicesAdmin().preparePutTemplate("test_dummy_template").setOrder(0).setPatterns(Collections.singletonList("*")).get()); - assertAcked( - indicesAdmin().preparePutTemplate("test_changed_template").setOrder(0).setPatterns(Collections.singletonList("*")).get() - ); - assertAcked( - indicesAdmin().preparePutTemplate("test_removed_template").setOrder(1).setPatterns(Collections.singletonList("*")).get() - ); + assertAcked(indicesAdmin().preparePutTemplate("test_dummy_template").setOrder(0).setPatterns(Collections.singletonList("*"))); + assertAcked(indicesAdmin().preparePutTemplate("test_changed_template").setOrder(0).setPatterns(Collections.singletonList("*"))); + assertAcked(indicesAdmin().preparePutTemplate("test_removed_template").setOrder(1).setPatterns(Collections.singletonList("*"))); AtomicInteger updateCount = new AtomicInteger(); // Wait for the templates to be updated back to normal diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java index a086f3e0b7777..2f3618f1d6aa7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/PrimaryAllocationIT.java @@ -86,7 +86,7 @@ public void testBulkWeirdScenario() throws Exception { internalCluster().startDataOnlyNodes(2); assertAcked( - indicesAdmin().prepareCreate("test").setSettings(indexSettings(1, 1).put("index.global_checkpoint_sync.interval", "1s")).get() + indicesAdmin().prepareCreate("test").setSettings(indexSettings(1, 1).put("index.global_checkpoint_sync.interval", "1s")) ); ensureGreen(); @@ -177,7 +177,7 @@ public void testDoNotAllowStaleReplicasToBePromotedToPrimary() throws Exception logger.info("--> check that the up-to-date primary shard gets promoted and that documents are available"); ensureYellow("test"); - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2L); } public void testFailedAllocationOfStalePrimaryToDataNodeWithNoData() throws Exception { @@ -399,9 +399,7 @@ public void testDoNotRemoveAllocationIdOnNodeLeave() throws Exception { internalCluster().startMasterOnlyNode(Settings.EMPTY); internalCluster().startDataOnlyNode(Settings.EMPTY); assertAcked( - indicesAdmin().prepareCreate("test") - .setSettings(indexSettings(1, 1).put("index.unassigned.node_left.delayed_timeout", "0ms")) - .get() + indicesAdmin().prepareCreate("test").setSettings(indexSettings(1, 1).put("index.unassigned.node_left.delayed_timeout", "0ms")) ); String replicaNode = internalCluster().startDataOnlyNode(Settings.EMPTY); ensureGreen("test"); @@ -430,9 +428,7 @@ public void testRemoveAllocationIdOnWriteAfterNodeLeave() throws Exception { internalCluster().startMasterOnlyNode(Settings.EMPTY); internalCluster().startDataOnlyNode(Settings.EMPTY); assertAcked( - indicesAdmin().prepareCreate("test") - .setSettings(indexSettings(1, 1).put("index.unassigned.node_left.delayed_timeout", "0ms")) - .get() + indicesAdmin().prepareCreate("test").setSettings(indexSettings(1, 1).put("index.unassigned.node_left.delayed_timeout", "0ms")) ); String replicaNode = internalCluster().startDataOnlyNode(Settings.EMPTY); ensureGreen("test"); @@ -475,7 +471,7 @@ public void testNotWaitForQuorumCopies() throws Exception { internalCluster().restartRandomDataNode(); logger.info("--> checking that index still gets allocated with only 1 shard copy being available"); ensureYellow("test"); - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 1L); } /** diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java index 0bcf49bdd2ae2..72d165ea0cf73 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/ShardRoutingRoleIT.java @@ -253,7 +253,7 @@ private static void assertRolesInRoutingTableXContent(ClusterState state) { } } } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } } @@ -498,7 +498,7 @@ public void testSearchRouting() throws Exception { } // Regular search for (int i = 0; i < 10; i++) { - 
final var search = client().prepareSearch(INDEX_NAME).setProfile(true); + final var search = prepareSearch(INDEX_NAME).setProfile(true); switch (randomIntBetween(0, 2)) { case 0 -> search.setRouting(randomAlphaOfLength(10)); case 1 -> search.setPreference(randomSearchPreference(routingTableWatcher.numShards, internalCluster().getNodeNames())); @@ -526,8 +526,7 @@ public void testSearchRouting() throws Exception { } String pitId = client().execute(OpenPointInTimeAction.INSTANCE, openRequest).actionGet().getPointInTimeId(); try { - final var profileResults = client().prepareSearch() - .setPointInTime(new PointInTimeBuilder(pitId)) + final var profileResults = prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)) .setProfile(true) .get() .getProfileResults(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceIT.java new file mode 100644 index 0000000000000..e85edc5805482 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceIT.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.routing.allocation; + +import org.elasticsearch.action.admin.indices.shrink.ResizeType; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.RoutingNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.health.node.HealthInfo; +import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.test.ESIntegTestCase; +import org.hamcrest.Matcher; + +import java.util.ArrayList; +import java.util.Map; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; + +public class ShardsAvailabilityHealthIndicatorServiceIT extends ESIntegTestCase { + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99951") + public void testIsGreenDuringIndexCreate() { + internalCluster().ensureAtLeastNumDataNodes(2); + + assertHealthDuring(equalTo(GREEN), () -> { + var index = randomIdentifier(); + prepareCreate(index).setSettings(indexSettings(1, 1)).get(); + ensureGreen(index); + }); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99951") + public void testIsGreenWhenNewReplicaAdded() { + internalCluster().ensureAtLeastNumDataNodes(2); + + var index = randomIdentifier(); + prepareCreate(index).setSettings(indexSettings(1, 0)).get(); + ensureGreen(index); + + assertHealthDuring(equalTo(GREEN), () -> { + updateIndexSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1), index); + ensureGreen(index); + }); + } + + 
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99951") + public void testIsGreenDuringSnapshotRestore() { + + internalCluster().ensureAtLeastNumDataNodes(2); + + var index = randomIdentifier(); + prepareCreate(index).setSettings(indexSettings(1, 1)).get(); + ensureGreen(index); + + var repositoryName = "repository"; + var snapshotName = randomIdentifier(); + assertAcked( + clusterAdmin().preparePutRepository(repositoryName) + .setType("fs") + .setSettings(Settings.builder().put("location", randomRepoPath())) + ); + clusterAdmin().prepareCreateSnapshot(repositoryName, snapshotName).setIndices(index).setWaitForCompletion(true).get(); + if (randomBoolean()) { + assertAcked(indicesAdmin().prepareDelete(index)); + } else { + assertAcked(indicesAdmin().prepareClose(index)); + } + ensureGreen(); + + assertHealthDuring(equalTo(GREEN), () -> { + clusterAdmin().prepareRestoreSnapshot(repositoryName, snapshotName).setIndices(index).setWaitForCompletion(true).get(); + ensureGreen(index); + }); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99951") + public void testIsGreenDuringIndexClone() { + + internalCluster().ensureAtLeastNumDataNodes(2); + + var sourceIndex = randomIdentifier(); + var targetIndex = randomIdentifier(); + prepareCreate(sourceIndex).setSettings(indexSettings(1, 1)).get(); + ensureGreen(sourceIndex); + updateIndexSettings(Settings.builder().put("index.blocks.write", true), sourceIndex); + + assertHealthDuring(equalTo(GREEN), () -> { + indicesAdmin().prepareResizeIndex(sourceIndex, targetIndex).setResizeType(ResizeType.CLONE).get(); + ensureGreen(targetIndex); + }); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/99951") + public void testIsGreenDuringOpeningAndClosingIndex() { + + internalCluster().ensureAtLeastNumDataNodes(2); + + var index = randomIdentifier(); + prepareCreate(index).setSettings(indexSettings(1, 1)).get(); + ensureGreen(index); + + assertHealthDuring(equalTo(GREEN), () -> { + indicesAdmin().prepareClose(index).get(); + ensureGreen(index); + indicesAdmin().prepareClose(index).get(); + ensureGreen(index); + }); + } + + private void assertHealthDuring(Matcher statusMatcher, Runnable action) { + var clusterService = internalCluster().getCurrentMasterNodeInstance(ClusterService.class); + var allocationService = internalCluster().getCurrentMasterNodeInstance(AllocationService.class); + var systemIndices = internalCluster().getCurrentMasterNodeInstance(SystemIndices.class); + + var service = new ShardsAvailabilityHealthIndicatorService(clusterService, allocationService, systemIndices); + var states = new ArrayList(); + var listener = new ClusterStateListener() { + @Override + public void clusterChanged(ClusterChangedEvent event) { + states.add( + new RoutingNodesAndHealth(event.state().getRoutingNodes(), service.calculate(false, 1, new HealthInfo(Map.of()))) + ); + } + }; + + clusterService.addListener(listener); + try { + action.run(); + + for (RoutingNodesAndHealth state : states) { + state.assertHealth(statusMatcher); + } + } finally { + clusterService.removeListener(listener); + } + } + + private record RoutingNodesAndHealth(RoutingNodes routing, HealthIndicatorResult health) { + private void assertHealth(Matcher statusMatcher) { + assertThat("Health [" + health + "] for routing: " + routing, health.status(), statusMatcher); + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index 8b4a82cc36c20..965674b772998 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -195,7 +195,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { } client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); - assertSearchHits(client().prepareSearch("test"), "1"); + assertSearchHits(prepareSearch("test"), "1"); // Move all nodes above the low watermark so no shard movement can occur, and at least one node above the flood stage watermark so // the index is blocked @@ -221,7 +221,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { client().prepareIndex().setIndex("test").setId("2").setSource("foo", "bar"), IndexMetadata.INDEX_READ_ONLY_ALLOW_DELETE_BLOCK ); - assertSearchHits(client().prepareSearch("test"), "1"); + assertSearchHits(prepareSearch("test"), "1"); logger.info("--> index is confirmed read-only, releasing disk space"); @@ -240,7 +240,7 @@ public void testAutomaticReleaseOfIndexBlock() throws Exception { throw new AssertionError("retrying", e); } }); - assertSearchHits(client().prepareSearch("test"), "1", "3"); + assertSearchHits(prepareSearch("test"), "1", "3"); } public void testOnlyMovesEnoughShardsToDropBelowHighWatermark() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java index 022f7acedc517..7e3adf8e0283f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java @@ -349,9 +349,7 @@ private void testRemoveArchiveSettingsWithBlocks(boolean readOnly, boolean readO if (readOnlyAllowDelete) { settingsBuilder.put(Metadata.SETTING_READ_ONLY_ALLOW_DELETE_SETTING.getKey(), "true"); } - assertAcked( - clusterAdmin().prepareUpdateSettings().setPersistentSettings(settingsBuilder).setTransientSettings(settingsBuilder).get() - ); + assertAcked(clusterAdmin().prepareUpdateSettings().setPersistentSettings(settingsBuilder).setTransientSettings(settingsBuilder)); ClusterState state = clusterAdmin().prepareState().get().getState(); if (readOnly) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsUpdateWithFaultyMasterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsUpdateWithFaultyMasterIT.java index 1f31b2155846e..5caff409a2052 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsUpdateWithFaultyMasterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/ClusterSettingsUpdateWithFaultyMasterIT.java @@ -62,7 +62,7 @@ public void testClusterSettingsUpdateNotAcknowledged() throws Exception { .cluster() .prepareUpdateSettings() .setPersistentSettings(Settings.builder().put(BlockingClusterSettingTestPlugin.TEST_BLOCKING_SETTING.getKey(), true).build()) - .setMasterNodeTimeout(TimeValue.timeValueMillis(10L)) + .setMasterNodeTimeout(TimeValue.timeValueMillis(100L)) .execute(); logger.info("--> waiting for 
cluster state update to be blocked"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java index 2838987388598..85aa7d6206a5e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/settings/SettingsFilteringIT.java @@ -80,7 +80,6 @@ public void testSettingsFiltering() { .put("filter_test.notfoo", "test") .build() ) - .get() ); GetSettingsResponse response = indicesAdmin().prepareGetSettings("test-idx").get(); Settings settings = response.getIndexToSettings().get("test-idx"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java index d0311740fc637..58c13b8a6c721 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/shards/ClusterShardLimitIT.java @@ -121,7 +121,6 @@ public void testIndexCreationOverLimitFromTemplate() { .setPatterns(Collections.singletonList("should-fail")) .setOrder(1) .setSettings(indexSettings(counts.getFailingIndexShards(), counts.getFailingIndexReplicas())) - .get() ); final IllegalArgumentException e = expectThrows( @@ -243,7 +242,6 @@ public void testPreserveExistingSkipsCheck() { indicesAdmin().prepareUpdateSettings("test-index") .setPreserveExisting(true) .setSettings(Settings.builder().put("number_of_replicas", dataNodes)) - .get() ); ClusterState clusterState = clusterAdmin().prepareState().get().getState(); assertEquals(0, clusterState.getMetadata().index("test-index").getNumberOfReplicas()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java index 848f16b7cd5d0..2561799b475ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionCleanSettingsIT.java @@ -63,6 +63,6 @@ public void testSearchWithRelocationAndSlowClusterStateProcessing() throws Excep IndicesStoreIntegrationIT.relocateAndBlockCompletion(logger, "test", 0, node_1, node_2); // now search for the documents and see if we get a reply - assertThat(client().prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertThat(prepareSearch().setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index ce9ec8b5fc75c..586e95484afa4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -42,7 +42,6 @@ import org.elasticsearch.test.disruption.ServiceDisruptionScheme; import org.elasticsearch.test.junit.annotations.TestIssueLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; 
import java.util.ArrayList; @@ -567,23 +566,20 @@ protected boolean blockingAllowed() { try { Thread.sleep(100); } catch (InterruptedException e) { - throw new AssertionError("unexpected", e); + fail(e); } } } }); - final MockTransportService dataTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - dataNode - ); - dataTransportService.addRequestHandlingBehavior(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME, (handler, request, channel, task) -> { - if (removedNode.isDone() == false) { - channel.sendResponse(new ElasticsearchException("simulated check failure")); - } else { - handler.messageReceived(request, channel, task); - } - }); + MockTransportService.getInstance(dataNode) + .addRequestHandlingBehavior(FollowersChecker.FOLLOWER_CHECK_ACTION_NAME, (handler, request, channel, task) -> { + if (removedNode.isDone() == false) { + channel.sendResponse(new ElasticsearchException("simulated check failure")); + } else { + handler.messageReceived(request, channel, task); + } + }); removedNode.actionGet(10, TimeUnit.SECONDS); ensureStableCluster(2); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java index 1413ac453da1b..71c6ef956c4d4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/DiscoveryDisruptionIT.java @@ -57,19 +57,13 @@ public void testClusterJoinDespiteOfPublishingIssues() throws Exception { ); logger.info("blocking requests from non master [{}] to master [{}]", nonMasterNode, masterNode); - MockTransportService nonMasterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nonMasterNode - ); + final var nonMasterTransportService = MockTransportService.getInstance(nonMasterNode); nonMasterTransportService.addFailToSendNoConnectRule(masterTranspotService); assertNoMaster(nonMasterNode); logger.info("blocking cluster state publishing from master [{}] to non master [{}]", masterNode, nonMasterNode); - MockTransportService masterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - masterNode - ); + final var masterTransportService = MockTransportService.getInstance(masterNode); TransportService localTransportService = internalCluster().getInstance( TransportService.class, discoveryNodes.getLocalNode().getName() @@ -188,10 +182,7 @@ public void testNodeNotReachableFromMaster() throws Exception { } logger.info("blocking request from master [{}] to [{}]", masterNode, nonMasterNode); - MockTransportService masterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - masterNode - ); + final var masterTransportService = MockTransportService.getInstance(masterNode); if (randomBoolean()) { masterTransportService.addUnresponsiveRule(internalCluster().getInstance(TransportService.class, nonMasterNode)); } else { @@ -232,13 +223,13 @@ public void testJoinWaitsForClusterApplier() { safeAwait(barrier); // drop the victim from the cluster with a network disruption - final var masterTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, masterName); + final var masterTransportService = MockTransportService.getInstance(masterName); 
masterTransportService.addFailToSendNoConnectRule(internalCluster().getInstance(TransportService.class, victimName)); logger.info("--> waiting for victim's departure"); ensureStableCluster(2, masterName); // verify that the victim sends no joins while the applier is blocked - final var victimTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, victimName); + final var victimTransportService = MockTransportService.getInstance(victimName); victimTransportService.addSendBehavior((connection, requestId, action, request, options) -> { assertNotEquals(action, JoinHelper.JOIN_ACTION_NAME); connection.sendRequest(requestId, action, request, options); @@ -283,13 +274,13 @@ public void testJoinWaitsForCircuitBreaker() throws InterruptedException { } // drop the victim from the cluster with a network disruption - final var masterTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, masterName); + final var masterTransportService = MockTransportService.getInstance(masterName); masterTransportService.addFailToSendNoConnectRule(internalCluster().getInstance(TransportService.class, victimName)); logger.info("--> waiting for victim's departure"); ensureStableCluster(2, masterName); // verify that the victim sends no joins while the circuit breaker is breaking - final var victimTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, victimName); + final var victimTransportService = MockTransportService.getInstance(victimName); victimTransportService.addSendBehavior((connection, requestId, action, request, options) -> { assertNotEquals(action, JoinHelper.JOIN_ACTION_NAME); connection.sendRequest(requestId, action, request, options); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index 828ae839cdd7d..42bc0f19bf757 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -154,14 +154,14 @@ public void testIndexActions() throws Exception { // check count for (int i = 0; i < 5; i++) { // test successful - SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse countResponse = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet(); assertNoFailures(countResponse); assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); // count with no query is a match all one - countResponse = client().prepareSearch("test").setSize(0).execute().actionGet(); + countResponse = prepareSearch("test").setSize(0).execute().actionGet(); assertThat( "Failures " + countResponse.getShardFailures(), countResponse.getShardFailures() == null ? 
0 : countResponse.getShardFailures().length, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java index 75b818d082dff..6571b9a6c928c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/ShardInfoIT.java @@ -96,7 +96,6 @@ private void prepareIndex(int numberOfPrimaryShards, boolean routingRequired) th assertAcked( prepareCreate("idx").setSettings(indexSettings(numberOfPrimaryShards, numCopies - 1)) .setMapping("_routing", "required=" + routingRequired) - .get() ); for (int i = 0; i < numberOfPrimaryShards; i++) { ensureActiveShardCopies(i, numNodes); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java index f791f527862dd..30940c1e154b0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/env/NodeEnvironmentIT.java @@ -238,7 +238,7 @@ public void testUpgradeDataFolder() throws IOException, InterruptedException { assertEquals(nodeId, clusterAdmin().prepareState().get().getState().nodes().getMasterNodeId()); assertTrue(indexExists("test")); ensureYellow("test"); - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } public void testFailsToStartOnDataPathsFromMultipleNodes() throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java new file mode 100644 index 0000000000000..fe447eca6e8fd --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/features/ClusterFeaturesIT.java @@ -0,0 +1,48 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.features; + +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasItem; + +public class ClusterFeaturesIT extends ESIntegTestCase { + + @SuppressForbidden(reason = "Directly checking node features in cluster state") + public void testClusterHasFeatures() { + internalCluster().startNodes(2); + ensureGreen(); + + FeatureService service = internalCluster().getCurrentMasterNodeInstance(FeatureService.class); + + assertThat(service.getNodeFeatures(), hasItem(FeatureService.FEATURES_SUPPORTED.id())); + + // check the nodes all have a feature in their cluster state (there should always be features_supported) + var response = clusterAdmin().state(new ClusterStateRequest().clear().nodes(true)).actionGet(); + var features = response.getState().clusterFeatures().nodeFeatures(); + Set<String> missing = features.entrySet() + .stream() + .filter(e -> e.getValue().contains(FeatureService.FEATURES_SUPPORTED.id()) == false) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + assertThat(missing + " out of " + features.keySet() + " does not have the required feature", missing, empty()); + + // check that all nodes have the same features + var featureList = List.copyOf(response.getState().clusterFeatures().nodeFeatures().values()); + assertEquals("Nodes do not have the same features", featureList.get(0), featureList.get(1)); + } +}
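ClusterFeaturesIT above exercises the new per-node features plumbing. For orientation, a minimal sketch of how a caller inside an ESIntegTestCase subclass might gate behaviour on a feature; only getNodeFeatures() and FEATURES_SUPPORTED are confirmed by the diff, the clusterHasFeature accessor name is an assumption:

    // Hedged sketch: gating behaviour on a cluster-wide feature check.
    FeatureService featureService = internalCluster().getCurrentMasterNodeInstance(FeatureService.class);
    ClusterState state = clusterAdmin().prepareState().get().getState();
    if (featureService.clusterHasFeature(state, FeatureService.FEATURES_SUPPORTED)) {
        // every node in the cluster advertises the feature, so it is safe to rely on it
    }

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index 1817e5c6debe6..e995d815af0f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -36,7 +36,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeMetadata; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.ShardLimitValidator; @@ -283,7 +283,7 @@ public void testTwoNodesSingleDoc() throws Exception { logger.info("--> verify 1 doc in the index"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } logger.info("--> closing test index..."); @@ -306,9 +306,9 @@ public void testTwoNodesSingleDoc() throws Exception { assertThat(health.isTimedOut(), equalTo(false)); logger.info("--> verify 1 doc in the index"); - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } } @@ -400,7 +400,7 @@ public void testRecoverBrokenIndexMetadata() throws Exception { .settings( Settings.builder() .put(metadata.getSettings()) - .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.MINIMUM_COMPATIBLE) +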
.put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.MINIMUM_COMPATIBLE) // this is invalid but should be archived .put("index.similarity.BM25.type", "boolean") // this one is not validated ahead of time and breaks allocation @@ -548,7 +548,7 @@ public void testArchiveBrokenClusterSettings() throws Exception { assertNull( state.metadata().persistentSettings().get("archived." + ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey()) ); - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } public void testHalfDeletedIndexImport() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/QuorumGatewayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/QuorumGatewayIT.java index 1e2b03d775ee7..a77201e1e141a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/QuorumGatewayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/QuorumGatewayIT.java @@ -50,7 +50,7 @@ public void testQuorumRecovery() throws Exception { refresh(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2L); } logger.info("--> restart all nodes"); internalCluster().fullRestart(new RestartCallback() { @@ -90,7 +90,7 @@ public void doAfterNodes(int numNodes, final Client activeClient) throws Excepti ensureGreen(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 3L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 3L); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java index 6d923bf5821b4..81149efb1596f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/RecoveryFromGatewayIT.java @@ -123,7 +123,7 @@ public void testOneNodeRecoverFromGateway() throws Exception { .actionGet(); refresh(); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a // shard that is still in post recovery when we restart and the ensureYellow() below will timeout @@ -135,7 +135,7 @@ public void testOneNodeRecoverFromGateway() throws Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); indicesAdmin().prepareRefresh().execute().actionGet(); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); internalCluster().fullRestart(); @@ -144,7 +144,7 @@ public void testOneNodeRecoverFromGateway() throws Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); indicesAdmin().prepareRefresh().execute().actionGet(); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("appAccountIds", 179)), 2); } private Map<String, long[]> assertAndCapturePrimaryTerms(Map<String, long[]> previousTerms) {
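assertAndCapturePrimaryTerms(Map<String, long[]>) encodes the invariant these gateway tests rely on: a shard's primary term may only grow across restarts. Its body is not shown in this hunk; a hedged sketch of the capture half, using only public cluster-state accessors inside the test class (Map<String, long[]> itself is an inferred reconstruction of the erased generics):

    // Hedged sketch: record the primary term of every shard, keyed by index name.
    Map<String, long[]> terms = new HashMap<>();
    for (IndexMetadata indexMetadata : clusterAdmin().prepareState().get().getState().metadata()) {
        long[] shardTerms = new long[indexMetadata.getNumberOfShards()];
        for (int shard = 0; shard < shardTerms.length; shard++) {
            shardTerms[shard] = indexMetadata.primaryTerm(shard); // term of the current primary for this shard
        }
        terms.put(indexMetadata.getIndex().getName(), shardTerms);
    }

@@ -233,10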
+233,10 @@ public void testSingleNodeNoFlush() throws Exception { refresh(); for (int i = 0; i <= randomInt(10); i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); } if (indexToAllShards == false) { // we have to verify primaries are started for them to be restored @@ -253,10 +253,10 @@ public void testSingleNodeNoFlush() throws Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); for (int i = 0; i <= randomInt(10); i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); } internalCluster().fullRestart(); @@ -266,10 +266,10 @@ public void testSingleNodeNoFlush() throws Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); for (int i = 0; i <= randomInt(10); i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); - assertHitCount(client().prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), value1Docs + value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value1")), value1Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("field", "value2")), value2Docs); + assertHitCount(prepareSearch().setSize(0).setQuery(termQuery("num", 179)), value1Docs); } } @@ -288,7 +288,7 @@ public void testSingleNodeWithFlush() throws Exception { .actionGet(); refresh(); - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); ensureYellow("test"); // wait for primary allocations here otherwise if we have a lot of shards we might have a // shard that is still in post recovery when we restart and the ensureYellow() below will timeout @@ -302,7 +302,7 @@ public void testSingleNodeWithFlush() throws 
Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } internalCluster().fullRestart(); @@ -312,7 +312,7 @@ public void testSingleNodeWithFlush() throws Exception { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } } @@ -337,7 +337,7 @@ public void testTwoNodeFirstNodeCleared() throws Exception { ensureGreen(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } Map<String, long[]> primaryTerms = assertAndCapturePrimaryTerms(null); @@ -365,7 +365,7 @@ public boolean clearData(String nodeName) { primaryTerms = assertAndCapturePrimaryTerms(primaryTerms); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } client().execute(ClearVotingConfigExclusionsAction.INSTANCE, new ClearVotingConfigExclusionsRequest()).get(); @@ -395,7 +395,7 @@ public void testLatestVersionLoaded() throws Exception { ensureGreen(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2); } String metadataUuid = clusterAdmin().prepareState().execute().get().getState().getMetadata().clusterUUID(); @@ -418,7 +418,7 @@ public void testLatestVersionLoaded() throws Exception { logger.info("--> checking if documents exist, there should be 3"); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 3); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 3); } logger.info("--> add some metadata and additional template"); @@ -462,7 +462,7 @@ public void testLatestVersionLoaded() throws Exception { assertThat(clusterAdmin().prepareState().execute().get().getState().getMetadata().clusterUUID(), equalTo(metadataUuid)); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 3); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 3); } ClusterState state = clusterAdmin().prepareState().execute().actionGet().getState(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorIT.java index e22359c30265e..8cbce0cc098ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorIT.java @@ -34,7 +34,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import java.util.Arrays; import java.util.Collection; @@ -108,10 +107,7 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception { } CountDownLatch blockRecovery = new CountDownLatch(1); CountDownLatch
recoveryStarted = new CountDownLatch(1); - MockTransportService transportServiceOnPrimary = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithPrimary - ); + final var transportServiceOnPrimary = MockTransportService.getInstance(nodeWithPrimary); transportServiceOnPrimary.addSendBehavior((connection, requestId, action, request, options) -> { if (PeerRecoveryTargetService.Actions.FILES_INFO.equals(action)) { recoveryStarted.countDown(); @@ -169,10 +165,7 @@ public void testRecentPrimaryInformation() throws Exception { } CountDownLatch blockRecovery = new CountDownLatch(1); CountDownLatch recoveryStarted = new CountDownLatch(1); - MockTransportService transportServiceOnPrimary = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithPrimary - ); + final var transportServiceOnPrimary = MockTransportService.getInstance(nodeWithPrimary); transportServiceOnPrimary.addSendBehavior((connection, requestId, action, request, options) -> { if (PeerRecoveryTargetService.Actions.FILES_INFO.equals(action)) { recoveryStarted.countDown(); @@ -349,10 +342,7 @@ public void testDoNotCancelRecoveryForBrokenNode() throws Exception { ); indicesAdmin().prepareFlush(indexName).get(); String brokenNode = internalCluster().startDataOnlyNode(); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithPrimary - ); + final var transportService = MockTransportService.getInstance(nodeWithPrimary); CountDownLatch newNodeStarted = new CountDownLatch(1); transportService.addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.TRANSLOG_OPS)) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorSyncIdIT.java b/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorSyncIdIT.java index 29e9d98852a4d..313d1e686e1fd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorSyncIdIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/gateway/ReplicaShardAllocatorSyncIdIT.java @@ -35,7 +35,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.junit.Before; import java.io.IOException; @@ -187,10 +186,7 @@ public void testPreferCopyCanPerformNoopRecovery() throws Exception { }); CountDownLatch blockRecovery = new CountDownLatch(1); CountDownLatch recoveryStarted = new CountDownLatch(1); - MockTransportService transportServiceOnPrimary = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithPrimary - ); + final var transportServiceOnPrimary = MockTransportService.getInstance(nodeWithPrimary); transportServiceOnPrimary.addSendBehavior((connection, requestId, action, request, options) -> { if (PeerRecoveryTargetService.Actions.FILES_INFO.equals(action)) { recoveryStarted.countDown(); @@ -279,7 +275,7 @@ public void testSimulateRecoverySourceOnOldNode() throws Exception { syncFlush(indexName); } internalCluster().startDataOnlyNode(); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, source); + final var transportService = MockTransportService.getInstance(source); Semaphore failRecovery = new Semaphore(1); 
transportService.addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.CLEAN_FILES)) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java index 11d32bb231a01..a31c3a08b8a4f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java @@ -11,27 +11,15 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.metrics.Counters; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.health.stats.HealthApiStatsAction; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.ArrayList; import java.util.Collection; @@ -40,7 +28,6 @@ import java.util.Map; import java.util.NoSuchElementException; import java.util.concurrent.ExecutionException; -import java.util.function.Supplier; import java.util.stream.Stream; import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; @@ -95,25 +82,10 @@ public List<Setting<?>> getSettings() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - healthIndicatorServices.add(new IlmHealthIndicatorService(clusterService)); - healthIndicatorServices.add(new SlmHealthIndicatorService(clusterService)); - healthIndicatorServices.add(new ClusterCoordinationHealthIndicatorService(clusterService)); + public Collection<?> createComponents(PluginServices services) { + healthIndicatorServices.add(new IlmHealthIndicatorService(services.clusterService())); + healthIndicatorServices.add(new SlmHealthIndicatorService(services.clusterService())); + healthIndicatorServices.add(new ClusterCoordinationHealthIndicatorService(services.clusterService())); return new ArrayList<>(healthIndicatorServices); }
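GetHealthActionIT is the first of several test plugins in this patch migrated from the fourteen-argument createComponents signature to the new PluginServices accessor object. For a plugin author the migration typically collapses to something like the sketch below; only services.clusterService() is confirmed by this diff, and MyComponent is a hypothetical class used for illustration:

    @Override
    public Collection<?> createComponents(PluginServices services) {
        // Each formerly-positional constructor argument is now fetched from the
        // services object on demand.
        ClusterService clusterService = services.clusterService();
        return List.of(new MyComponent(clusterService)); // MyComponent is hypothetical
    }

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/health/HealthServiceIT.java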
b/server/src/internalClusterTest/java/org/elasticsearch/health/HealthServiceIT.java index 707644e28228a..2e741d6691d24 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/health/HealthServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/health/HealthServiceIT.java @@ -9,37 +9,22 @@ package org.elasticsearch.health; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.node.DiskHealthInfo; import org.elasticsearch.health.node.FetchHealthInfoCacheAction; import org.elasticsearch.health.node.HealthInfo; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; import static org.hamcrest.Matchers.equalTo; @@ -124,22 +109,7 @@ public static final class TestHealthPlugin extends Plugin implements HealthPlugi private final List<HealthIndicatorService> healthIndicatorServices = new ArrayList<>(); @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { healthIndicatorServices.add(new TestHealthIndicatorService()); return new ArrayList<>(healthIndicatorServices); }
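FinalPipelineIT, next, revolves around the index.final_pipeline setting: a pipeline that runs after the default pipeline (and after any reroute) and may not redirect documents again. A minimal setup sketch; "my-final-pipeline" is a hypothetical pipeline id assumed to have been registered via PutPipelineRequest beforehand:

    // Hedged sketch: attach a final pipeline to an index at creation time.
    indicesAdmin().prepareCreate("target")
        .setSettings(Settings.builder().put("index.final_pipeline", "my-final-pipeline"))
        .get();

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index ffa6799601b14..24372978834c6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -17,17 +17,9 @@ import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.client.internal.Client;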
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; @@ -35,14 +27,8 @@ import org.elasticsearch.ingest.Processor; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; import org.junit.After; @@ -52,7 +38,6 @@ import java.util.Map; import java.util.Objects; import java.util.function.BiConsumer; -import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; @@ -139,7 +124,7 @@ public void testFinalPipelineOfOldDestinationIsNotInvoked() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); - SearchResponse target = client().prepareSearch("target").get(); + SearchResponse target = prepareSearch("target").get(); assertEquals(1, target.getHits().getTotalHits().value); assertFalse(target.getHits().getAt(0).getSourceAsMap().containsKey("final")); } @@ -165,7 +150,7 @@ public void testFinalPipelineOfNewDestinationIsInvoked() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); - SearchResponse target = client().prepareSearch("target").get(); + SearchResponse target = prepareSearch("target").get(); assertEquals(1, target.getHits().getTotalHits().value); assertEquals(true, target.getHits().getAt(0).getSourceAsMap().get("final")); } @@ -191,7 +176,7 @@ public void testDefaultPipelineOfNewDestinationIsNotInvoked() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); - SearchResponse target = client().prepareSearch("target").get(); + SearchResponse target = prepareSearch("target").get(); assertEquals(1, target.getHits().getTotalHits().value); assertFalse(target.getHits().getAt(0).getSourceAsMap().containsKey("final")); } @@ -217,7 +202,7 @@ public void testDefaultPipelineOfRerouteDestinationIsInvoked() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); - SearchResponse target = client().prepareSearch("target").get(); + SearchResponse target = prepareSearch("target").get(); assertEquals(1, target.getHits().getTotalHits().value); assertTrue(target.getHits().getAt(0).getSourceAsMap().containsKey("final")); } @@ -392,26 +377,6 @@ public void testHighOrderFinalPipelinePreferred() throws IOException { public static 
class TestPlugin extends Plugin implements IngestPlugin { - @Override - public Collection<Object> createComponents( - final Client client, - final ClusterService clusterService, - final ThreadPool threadPool, - final ResourceWatcherService resourceWatcherService, - final ScriptService scriptService, - final NamedXContentRegistry xContentRegistry, - final Environment environment, - final NodeEnvironment nodeEnvironment, - final NamedWriteableRegistry namedWriteableRegistry, - final IndexNameExpressionResolver expressionResolver, - final Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - return List.of(); - } - @Override public Map<String, Processor.Factory> getProcessors(Processor.Parameters parameters) { return Map.of( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java index ae1b68913460c..41bdf944edd59 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/HiddenIndexIT.java @@ -40,21 +40,19 @@ public void testHiddenIndexSearch() { client().prepareIndex("hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); // default not visible to wildcard expansion - SearchResponse searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index")) - .setSize(1000) + SearchResponse searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000) .setQuery(QueryBuilders.matchAllQuery()) .get(); boolean matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex())); assertFalse(matchedHidden); // direct access allowed - searchResponse = client().prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch("hidden-index").setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex())); assertTrue(matchedHidden); // with indices option to include hidden - searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index")) - .setSize(1000) + searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000) .setQuery(QueryBuilders.matchAllQuery()) .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN) .get(); @@ -64,36 +62,28 @@ public void testHiddenIndexSearch() { // implicit based on use of pattern starting with .
and a wildcard assertAcked(indicesAdmin().prepareCreate(".hidden-index").setSettings(Settings.builder().put("index.hidden", true).build()).get()); client().prepareIndex(".hidden-index").setSource("foo", "bar").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - searchResponse = client().prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); + searchResponse = prepareSearch(randomFrom(".*", ".hidden-*")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> ".hidden-index".equals(hit.getIndex())); assertTrue(matchedHidden); // make index not hidden updateIndexSettings(Settings.builder().put("index.hidden", false), "hidden-index"); - searchResponse = client().prepareSearch(randomFrom("*", "_all", "h*", "*index")) - .setSize(1000) - .setQuery(QueryBuilders.matchAllQuery()) - .get(); + searchResponse = prepareSearch(randomFrom("*", "_all", "h*", "*index")).setSize(1000).setQuery(QueryBuilders.matchAllQuery()).get(); matchedHidden = Arrays.stream(searchResponse.getHits().getHits()).anyMatch(hit -> "hidden-index".equals(hit.getIndex())); assertTrue(matchedHidden); } public void testGlobalTemplatesDoNotApply() { - assertAcked(indicesAdmin().preparePutTemplate("a_global_template").setPatterns(List.of("*")).setMapping("foo", "type=text").get()); - assertAcked( - indicesAdmin().preparePutTemplate("not_global_template").setPatterns(List.of("a*")).setMapping("bar", "type=text").get() - ); + assertAcked(indicesAdmin().preparePutTemplate("a_global_template").setPatterns(List.of("*")).setMapping("foo", "type=text")); + assertAcked(indicesAdmin().preparePutTemplate("not_global_template").setPatterns(List.of("a*")).setMapping("bar", "type=text")); assertAcked( - indicesAdmin().preparePutTemplate("specific_template") - .setPatterns(List.of("a_hidden_index")) - .setMapping("baz", "type=text") - .get() + indicesAdmin().preparePutTemplate("specific_template").setPatterns(List.of("a_hidden_index")).setMapping("baz", "type=text") ); assertAcked( - indicesAdmin().preparePutTemplate("unused_template").setPatterns(List.of("not_used")).setMapping("foobar", "type=text").get() + indicesAdmin().preparePutTemplate("unused_template").setPatterns(List.of("not_used")).setMapping("foobar", "type=text") ); - assertAcked(indicesAdmin().prepareCreate("a_hidden_index").setSettings(Settings.builder().put("index.hidden", true).build()).get()); + assertAcked(indicesAdmin().prepareCreate("a_hidden_index").setSettings(Settings.builder().put("index.hidden", true).build())); GetMappingsResponse mappingsResponse = indicesAdmin().prepareGetMappings("a_hidden_index").get(); assertThat(mappingsResponse.mappings().size(), is(1)); @@ -130,7 +120,6 @@ public void testNonGlobalTemplateCanMakeIndexHidden() { .setPatterns(List.of("my_hidden_pattern*")) .setMapping("foo", "type=text") .setSettings(Settings.builder().put("index.hidden", true).build()) - .get() ); assertAcked(indicesAdmin().prepareCreate("my_hidden_pattern1").get()); GetSettingsResponse getSettingsResponse = indicesAdmin().prepareGetSettings("my_hidden_pattern1").get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexRequestBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexRequestBuilderIT.java index b69bb7b42fa19..31368a3cfb8fd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/IndexRequestBuilderIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/index/IndexRequestBuilderIT.java @@ -37,7 +37,7 @@ public void testSetSource() throws InterruptedException, ExecutionException { client().prepareIndex("test").setSource(map) }; indexRandom(true, builders); ElasticsearchAssertions.assertHitCount( - client().prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")), + prepareSearch("test").setQuery(QueryBuilders.termQuery("test_field", "foobar")), builders.length ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/SettingsListenerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/SettingsListenerIT.java index fb0e3478c2cde..de783a28bce1d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/SettingsListenerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/SettingsListenerIT.java @@ -7,33 +7,18 @@ */ package org.elasticsearch.index; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.function.Supplier; import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -63,22 +48,7 @@ public void onIndexModule(IndexModule module) { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { return Collections.singletonList(service); } }
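WaitUntilRefreshIT, below, is mostly the prepareSearch cleanup plus assertNoSearchHits now taking the builder directly. For orientation, the behaviour these tests cover: RefreshPolicy.WAIT_UNTIL parks a write until a refresh makes it visible instead of forcing an immediate refresh, so forcedRefresh() reports false. A minimal illustration of the pattern the assertions rely on:

    // WAIT_UNTIL blocks until the change is searchable, without forcing a refresh.
    IndexResponse resp = client().prepareIndex("test")
        .setId("1")
        .setSource("foo", "bar")
        .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL)
        .get();
    assertFalse("request shouldn't have forced a refresh", resp.forcedRefresh());

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java index 6970f73e591fc..ec03a740f8ade 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/WaitUntilRefreshIT.java @@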
-63,25 +63,25 @@ public void testIndex() { .get(); assertEquals(RestStatus.CREATED, index.status()); assertFalse("request shouldn't have forced a refresh", index.forcedRefresh()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); } public void testDelete() throws InterruptedException, ExecutionException { // Index normally indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); // Now delete with blockUntilRefresh DeleteResponse delete = client().prepareDelete("test", "1").setRefreshPolicy(RefreshPolicy.WAIT_UNTIL).get(); assertEquals(DocWriteResponse.Result.DELETED, delete.getResult()); assertFalse("request shouldn't have forced a refresh", delete.forcedRefresh()); - assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); + assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar"))); } public void testUpdate() throws InterruptedException, ExecutionException { // Index normally indexRandom(true, client().prepareIndex("test").setId("1").setSource("foo", "bar")); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); // Update with RefreshPolicy.WAIT_UNTIL UpdateResponse update = client().prepareUpdate("test", "1") @@ -90,7 +90,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { .get(); assertEquals(2, update.getVersion()); assertFalse("request shouldn't have forced a refresh", update.forcedRefresh()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1"); // Upsert with RefreshPolicy.WAIT_UNTIL update = client().prepareUpdate("test", "2") @@ -100,7 +100,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { .get(); assertEquals(1, update.getVersion()); assertFalse("request shouldn't have forced a refresh", update.forcedRefresh()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")), "2"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "cat")), "2"); // Update-becomes-delete with RefreshPolicy.WAIT_UNTIL update = client().prepareUpdate("test", "2") @@ -109,7 +109,7 @@ public void testUpdate() throws InterruptedException, ExecutionException { .get(); assertEquals(2, update.getVersion()); assertFalse("request shouldn't have forced a refresh", update.forcedRefresh()); - assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "cat")).get()); + assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "cat"))); } public void testBulk() { @@ -117,19 +117,19 @@ public void testBulk() { BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); bulk.add(client().prepareIndex("test").setId("1").setSource("foo", "bar")); assertBulkSuccess(bulk.get()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); // Update by bulk with RefreshPolicy.WAIT_UNTIL bulk = 
client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); bulk.add(client().prepareUpdate("test", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "foo", "baz")); assertBulkSuccess(bulk.get()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "baz")), "1"); // Delete by bulk with RefreshPolicy.WAIT_UNTIL bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); bulk.add(client().prepareDelete("test", "1")); assertBulkSuccess(bulk.get()); - assertNoSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")).get()); + assertNoSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar"))); // Update makes a noop bulk = client().prepareBulk().setRefreshPolicy(RefreshPolicy.WAIT_UNTIL); @@ -153,7 +153,7 @@ public void testNoRefreshInterval() throws InterruptedException, ExecutionExcept } assertEquals(RestStatus.CREATED, index.get().status()); assertFalse("request shouldn't have forced a refresh", index.get().forcedRefresh()); - assertSearchHits(client().prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); + assertSearchHits(prepareSearch("test").setQuery(matchQuery("foo", "bar")), "1"); } private void assertBulkSuccess(BulkResponse response) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index 56e1598bd7a15..f297b61e7087d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -108,8 +108,7 @@ public void testMaxDocsLimit() throws Exception { ); assertThat(deleteError.getMessage(), containsString("Number of documents in the index can't exceed [" + maxDocs.get() + "]")); indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(new MatchAllQueryBuilder()) + SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder()) .setTrackTotalHitsUpTo(Integer.MAX_VALUE) .setSize(0) .get(); @@ -121,8 +120,7 @@ public void testMaxDocsLimit() throws Exception { internalCluster().fullRestart(); internalCluster().ensureAtLeastNumDataNodes(2); ensureGreen("test"); - searchResponse = client().prepareSearch("test") - .setQuery(new MatchAllQueryBuilder()) + searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder()) .setTrackTotalHitsUpTo(Integer.MAX_VALUE) .setSize(0) .get(); @@ -137,8 +135,7 @@ public void testMaxDocsLimitConcurrently() throws Exception { assertThat(indexingResult.numFailures, greaterThan(0)); assertThat(indexingResult.numSuccess, both(greaterThan(0)).and(lessThanOrEqualTo(maxDocs.get()))); indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(new MatchAllQueryBuilder()) + SearchResponse searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder()) .setTrackTotalHitsUpTo(Integer.MAX_VALUE) .setSize(0) .get(); @@ -155,8 +152,7 @@ public void testMaxDocsLimitConcurrently() throws Exception { assertThat(indexingResult.numSuccess, equalTo(0)); } indicesAdmin().prepareRefresh("test").get(); - searchResponse = client().prepareSearch("test") - .setQuery(new MatchAllQueryBuilder()) + searchResponse = prepareSearch("test").setQuery(new MatchAllQueryBuilder()) 
.setTrackTotalHitsUpTo(Integer.MAX_VALUE) .setSize(0) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index c6055a295eabd..9fd1e788eca8c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -21,6 +21,7 @@ import java.io.IOException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -37,8 +38,7 @@ public void testDynamicTemplateCopyTo() throws Exception { SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values()); - SearchResponse response = client().prepareSearch("test-idx") - .setQuery(QueryBuilders.termQuery("even", true)) + SearchResponse response = prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("even", true)) .addAggregation(AggregationBuilders.terms("test").field("test_field").size(recordCount * 2).collectMode(aggCollectionMode)) .addAggregation( AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) @@ -67,8 +67,7 @@ public void testDynamicObjectCopyTo() throws Exception { assertAcked(indicesAdmin().prepareCreate("test-idx").setMapping(mapping)); client().prepareIndex("test-idx").setId("1").setSource("foo", "bar").get(); indicesAdmin().prepareRefresh("test-idx").execute().actionGet(); - SearchResponse response = client().prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")).get(); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("root.top.child", "bar")), 1L); } private XContentBuilder createDynamicTemplateMapping() throws IOException { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java index be2cd1eeb3581..38349e14bdf05 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/DynamicMappingIT.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -49,6 +48,7 @@ import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING; import static org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.MIN_DIMS_FOR_DYNAMIC_FLOAT_MAPPING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; @@ -179,7 +179,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } }); @@ -216,7 +216,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } }); @@ -372,15 +372,17 @@ public void testBulkRequestWithDynamicTemplates() throws Exception { assertFalse(bulkResponse.hasFailures()); assertSearchHits( - client().prepareSearch("test") - .setQuery(new GeoBoundingBoxQueryBuilder("location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))), + prepareSearch("test").setQuery( + new GeoBoundingBoxQueryBuilder("location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74)) + ), "1", "2", "4" ); assertSearchHits( - client().prepareSearch("test") - .setQuery(new GeoBoundingBoxQueryBuilder("address.location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74))), + prepareSearch("test").setQuery( + new GeoBoundingBoxQueryBuilder("address.location").setCorners(new GeoPoint(42, -72), new GeoPoint(40, -74)) + ), "3" ); } @@ -459,18 +461,9 @@ public void testDynamicRuntimeNoConflicts() { BulkResponse bulkItemResponses = client().bulk(bulkRequest).actionGet(); assertFalse(bulkItemResponses.buildFailureMessage(), bulkItemResponses.hasFailures()); - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one", "one")).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one.two", 3.5)).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("one.two.three", "1")).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("one", "one")), 1); + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("one.two", 3.5)), 1); + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("one.two.three", "1")), 1); } public void testDynamicRuntimeObjectFields() { @@ -507,24 +500,10 @@ public void testDynamicRuntimeObjectFields() { BulkResponse bulkItemResponses = client().bulk(bulkRequest).actionGet(); assertFalse(bulkItemResponses.buildFailureMessage(), bulkItemResponses.hasFailures()); - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("obj.one", 1)).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("anything", "anything")).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } - { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one", "one")).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } - { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(new MatchQueryBuilder("obj.runtime.one.two", "1")) - .get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("obj.one", 1)), 1); + assertHitCount(prepareSearch("test").setQuery(new 
MatchQueryBuilder("anything", "anything")), 1); + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one", "one")), 1); + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.one.two", "1")), 1); Exception exception = expectThrows( DocumentParsingException.class, @@ -568,12 +547,7 @@ public void testDynamicRuntimeObjectFields() { .status() ); - { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(new MatchQueryBuilder("obj.runtime.dynamic.number", 1)) - .get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); - } + assertHitCount(prepareSearch("test").setQuery(new MatchQueryBuilder("obj.runtime.dynamic.number", 1)), 1); // a doc with the same field but a different type causes a conflict Exception e = expectThrows( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java index d6eeee2206585..25c33ee66bad4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/MultiFieldsIntegrationIT.java @@ -26,6 +26,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; @@ -47,10 +48,8 @@ public void testMultiFields() throws Exception { client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title", "multi")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(client().prepareSearch("my-index").setQuery(matchQuery("title", "multi")), 1); + assertHitCount(client().prepareSearch("my-index").setQuery(matchQuery("title.not_analyzed", "Multi fields")), 1); assertAcked(indicesAdmin().preparePutMapping("my-index").setSource(createPutMappingSource())); @@ -68,8 +67,7 @@ public void testMultiFields() throws Exception { client().prepareIndex("my-index").setId("1").setSource("title", "Multi fields").setRefreshPolicy(IMMEDIATE).get(); - searchResponse = client().prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(client().prepareSearch("my-index").setQuery(matchQuery("title.uncased", "Multi")), 1); } @SuppressWarnings("unchecked") @@ -92,12 +90,11 @@ public void testGeoPointMultiField() throws Exception { GeoPoint point = new GeoPoint(51, 19); client().prepareIndex("my-index").setId("1").setSource("a", point.toString()).setRefreshPolicy(IMMEDIATE).get(); - SearchResponse countResponse = client().prepareSearch("my-index") - .setSize(0) + SearchResponse countResponse = prepareSearch("my-index").setSize(0) 
.setQuery(constantScoreQuery(geoDistanceQuery("a").point(51, 19).distance(50, DistanceUnit.KILOMETERS))) .get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); - countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get(); + countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", point.geohash())).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -119,7 +116,7 @@ public void testCompletionMultiField() throws Exception { assertThat(bField.get("type").toString(), equalTo("keyword")); client().prepareIndex("my-index").setId("1").setSource("a", "complete me").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get(); + SearchResponse countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "complete me")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -141,7 +138,7 @@ public void testIpMultiField() throws Exception { assertThat(bField.get("type").toString(), equalTo("keyword")); client().prepareIndex("my-index").setId("1").setSource("a", "127.0.0.1").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse countResponse = client().prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get(); + SearchResponse countResponse = prepareSearch("my-index").setSize(0).setQuery(matchQuery("a.b", "127.0.0.1")).get(); assertThat(countResponse.getHits().getTotalHits().value, equalTo(1L)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java index 5526343619e16..bb20ddd321d7c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/query/plugin/CustomQueryParserIT.java @@ -42,10 +42,10 @@ protected int numberOfShards() { } public void testCustomDummyQuery() { - assertHitCount(client().prepareSearch("index").setQuery(new DummyQueryBuilder()), 1L); + assertHitCount(prepareSearch("index").setQuery(new DummyQueryBuilder()), 1L); } public void testCustomDummyQueryWithinBooleanQuery() { - assertHitCount(client().prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryBuilder())), 1L); + assertHitCount(prepareSearch("index").setQuery(new BoolQueryBuilder().must(new DummyQueryBuilder())), 1L); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java index 369e6cef28970..7751d5e7783b9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java @@ -47,10 +47,10 @@ public void testZeroTermsQuery() throws ExecutionException, InterruptedException MatchPhraseQueryBuilder baseQuery = matchPhraseQuery("name", "the who").analyzer("standard_stopwords"); MatchPhraseQueryBuilder matchNoneQuery = baseQuery.zeroTermsQuery(ZeroTermsQueryOption.NONE); - assertHitCount(client().prepareSearch(INDEX).setQuery(matchNoneQuery), 0L); + assertHitCount(prepareSearch(INDEX).setQuery(matchNoneQuery), 0L); MatchPhraseQueryBuilder matchAllQuery = 
baseQuery.zeroTermsQuery(ZeroTermsQueryOption.ALL); - assertHitCount(client().prepareSearch(INDEX).setQuery(matchAllQuery), 2L); + assertHitCount(prepareSearch(INDEX).setQuery(matchAllQuery), 2L); } private List<IndexRequestBuilder> getIndexRequests() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java index a528116031ab3..d1122004ccce2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncIT.java @@ -23,7 +23,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentType; import java.util.ArrayList; @@ -95,21 +94,17 @@ public void testBackgroundGlobalCheckpointSync() throws Exception { if (node == other) { continue; } - final MockTransportService senderTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - node.getName() - ); - final MockTransportService receiverTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - other.getName() - ); - senderTransportService.addSendBehavior(receiverTransportService, (connection, requestId, action, request, options) -> { - if ("indices:admin/seq_no/global_checkpoint_sync[r]".equals(action)) { - throw new IllegalStateException("blocking indices:admin/seq_no/global_checkpoint_sync[r]"); - } else { - connection.sendRequest(requestId, action, request, options); - } - }); + MockTransportService.getInstance(node.getName()) + .addSendBehavior( + MockTransportService.getInstance(other.getName()), + (connection, requestId, action, request, options) -> { + if ("indices:admin/seq_no/global_checkpoint_sync[r]".equals(action)) { + throw new IllegalStateException("blocking indices:admin/seq_no/global_checkpoint_sync[r]"); + } else { + connection.sendRequest(requestId, action, request, options); + } + } + ); } } }, client -> { @@ -120,15 +115,7 @@ public void testBackgroundGlobalCheckpointSync() throws Exception { if (node == other) { continue; } - final MockTransportService senderTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - node.getName() - ); - final MockTransportService receiverTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - other.getName() - ); - senderTransportService.clearOutboundRules(receiverTransportService); + MockTransportService.getInstance(node.getName()).clearOutboundRules(MockTransportService.getInstance(other.getName())); } } }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java index c4ba69e3bcaec..c9906ccf1fbee 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/seqno/RetentionLeaseIT.java @@ -352,10 +352,7 @@ public void testRetentionLeasesSyncOnRecovery() throws Exception { Settings.builder().put(INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), TimeValue.timeValueMillis(100)) ); final Semaphore recoveriesToDisrupt = new
Semaphore(scaledRandomIntBetween(0, 4)); - final MockTransportService primaryTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - primaryShardNodeName - ); + final var primaryTransportService = MockTransportService.getInstance(primaryShardNodeName); primaryTransportService.addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.FINALIZE) && recoveriesToDisrupt.tryAcquire()) { if (randomBoolean()) { @@ -432,7 +429,7 @@ public void testCanRenewRetentionLeaseUnderBlock() throws InterruptedException { ) ); } catch (final Exception e) { - failWithException(e); + fail(e); } }); @@ -488,7 +485,7 @@ public void onResponse(final ReplicationResponse replicationResponse) { @Override public void onFailure(final Exception e) { - failWithException(e); + fail(e); } }); @@ -533,7 +530,7 @@ public void testCanRenewRetentionLeaseWithoutWaitingForShards() throws Interrupt ) ); } catch (final Exception e) { - failWithException(e); + fail(e); } }); @@ -589,10 +586,6 @@ private void runWaitForShardsTest( afterSync.accept(primary); } - private static void failWithException(Exception e) { - throw new AssertionError("unexpected", e); - } - private static ActionListener<ReplicationResponse> countDownLatchListener(CountDownLatch latch) { return ActionTestUtils.assertNoFailureListener(r -> latch.countDown()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java index 4476bc2ca6ec0..f473015f864db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -672,7 +672,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti } } shard.refresh("test"); - assertThat(client().search(countRequest).actionGet().getHits().getTotalHits().value, equalTo(numDocs)); + assertHitCount(client().search(countRequest), numDocs); assertThat(shard.getLocalCheckpoint(), equalTo(shard.seqNoStats().getMaxSeqNo())); final CountDownLatch engineResetLatch = new CountDownLatch(1); @@ -701,11 +701,7 @@ public void testInvalidateIndicesRequestCacheWhenRollbackEngine() throws Excepti equalTo(numDocs + moreDocs) ); } - assertThat( - "numDocs=" + numDocs + " moreDocs=" + moreDocs, - client().search(countRequest).actionGet().getHits().getTotalHits().value, - equalTo(numDocs + moreDocs) - ); + assertHitCount(client().search(countRequest), numDocs + moreDocs); } public void testShardChangesWithDefaultDocType() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java index 345f2a82d82c4..d57cbe50074ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/RemoveCorruptedShardDataCommandIT.java @@ -255,7 +255,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { ensureGreen(indexName); - assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), expectedNumDocs); + assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), expectedNumDocs); } public void testCorruptTranslogTruncation() throws Exception { @@ -424,13
+424,13 @@ public Settings onNodeStopped(String nodeName) throws Exception { ensureYellow(indexName); // Run a search and make sure it succeeds - assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), numDocsToKeep); + assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), numDocsToKeep); logger.info("--> starting the replica node to test recovery"); internalCluster().startNode(node2PathSettings); ensureGreen(indexName); for (String node : internalCluster().nodesInclude(indexName)) { - SearchRequestBuilder q = client().prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()); + SearchRequestBuilder q = prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()); assertHitCount(q, numDocsToKeep); } final RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(indexName).setActiveOnly(false).get(); @@ -513,7 +513,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { ensureYellow(); // Run a search and make sure it succeeds - assertHitCount(client().prepareSearch(indexName).setQuery(matchAllQuery()), totalDocs); + assertHitCount(prepareSearch(indexName).setQuery(matchAllQuery()), totalDocs); // check replica corruption final RemoveCorruptedShardDataCommand command = new RemoveCorruptedShardDataCommand(); @@ -534,7 +534,7 @@ public void testCorruptTranslogTruncationOfReplica() throws Exception { internalCluster().startNode(node2PathSettings); ensureGreen(indexName); for (String node : internalCluster().nodesInclude(indexName)) { - assertHitCount(client().prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()), totalDocs); + assertHitCount(prepareSearch(indexName).setPreference("_only_nodes:" + node).setQuery(matchAllQuery()), totalDocs); } final RecoveryResponse recoveryResponse = indicesAdmin().prepareRecoveries(indexName).setActiveOnly(false).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java index 3a10539d3c451..22bb5974ad550 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/shard/SearchIdleIT.java @@ -88,7 +88,7 @@ private void runTestAutomaticRefresh(final IntToLongFunction count) throws Inter assertFalse(indexService.getIndexSettings().isExplicitRefresh()); ensureGreen(); AtomicInteger totalNumDocs = new AtomicInteger(Integer.MAX_VALUE); - assertNoSearchHits(client().prepareSearch().get()); + assertNoSearchHits(client().prepareSearch()); int numDocs = scaledRandomIntBetween(25, 100); totalNumDocs.set(numDocs); CountDownLatch indexingDone = new CountDownLatch(numDocs); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index 29475583dc44d..8de218f8a29c8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -166,7 +166,7 @@ public void testCorruptFileAndRecover() throws InterruptedException, IOException assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).get()); assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).get()); // we have to flush at least once here since we don't corrupt the 
translog - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); final int numShards = numShards("test"); ShardRouting corruptedShardRouting = corruptRandomPrimaryFile(); @@ -192,7 +192,7 @@ public void testCorruptFileAndRecover() throws InterruptedException, IOException assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); final int numIterations = scaledRandomIntBetween(5, 20); for (int i = 0; i < numIterations; i++) { - assertHitCount(client().prepareSearch().setSize(numDocs), numDocs); + assertHitCount(prepareSearch().setSize(numDocs), numDocs); } /* @@ -277,7 +277,7 @@ public void testCorruptPrimaryNoReplica() throws ExecutionException, Interrupted assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).get()); assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).get()); // we have to flush at least once here since we don't corrupt the translog - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); ShardRouting shardRouting = corruptRandomPrimaryFile(); /* @@ -350,23 +350,20 @@ public void testCorruptionOnNetworkLayerFinalizingRecovery() throws InterruptedE final AtomicBoolean corrupt = new AtomicBoolean(true); final CountDownLatch hasCorrupted = new CountDownLatch(1); for (var dataNode : dataNodes) { - MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance( - TransportService.class, - dataNode.getName() - )); - mockTransportService.addSendBehavior( - internalCluster().getInstance(TransportService.class, unluckyNode.getName()), - (connection, requestId, action, request, options) -> { - if (corrupt.get() && action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { - RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; - byte[] array = BytesRef.deepCopyOf(req.content().toBytesRef()).bytes; - int i = randomIntBetween(0, req.content().length() - 1); - array[i] = (byte) ~array[i]; // flip one byte in the content - hasCorrupted.countDown(); + MockTransportService.getInstance(dataNode.getName()) + .addSendBehavior( + internalCluster().getInstance(TransportService.class, unluckyNode.getName()), + (connection, requestId, action, request, options) -> { + if (corrupt.get() && action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { + RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; + byte[] array = BytesRef.deepCopyOf(req.content().toBytesRef()).bytes; + int i = randomIntBetween(0, req.content().length() - 1); + array[i] = (byte) ~array[i]; // flip one byte in the content + hasCorrupted.countDown(); + } + connection.sendRequest(requestId, action, request, options); } - connection.sendRequest(requestId, action, request, options); - } - ); + ); } updateIndexSettings( @@ -411,10 +408,10 @@ public void testCorruptionOnNetworkLayer() throws InterruptedException { ensureGreen(); assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).execute().actionGet()); // we have to flush at least once here since we don't corrupt the translog - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); - var source = (MockTransportService) internalCluster().getInstance(TransportService.class, primariesNode.getName()); - var target = internalCluster().getInstance(TransportService.class, unluckyNode.getName()); + final var source = MockTransportService.getInstance(primariesNode.getName()); + final var 
target = MockTransportService.getInstance(unluckyNode.getName()); final boolean truncate = randomBoolean(); source.addSendBehavior(target, (connection, requestId, action, request, options) -> { @@ -509,7 +506,7 @@ public void onTimeout(TimeValue timeout) { final int numIterations = scaledRandomIntBetween(5, 20); for (int i = 0; i < numIterations; i++) { - assertHitCount(client().prepareSearch().setSize(numDocs), numDocs); + assertHitCount(prepareSearch().setSize(numDocs), numDocs); } } @@ -550,7 +547,7 @@ public void testCorruptFileThenSnapshotAndRestore() throws InterruptedException, ensureGreen(); assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).execute().actionGet()); // we have to flush at least once here since we don't corrupt the translog - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); ShardRouting shardRouting = corruptRandomPrimaryFile(false); logger.info("--> shard {} has a corrupted file", shardRouting); @@ -617,7 +614,7 @@ public void testReplicaCorruption() throws Exception { ensureGreen(); assertAllSuccessful(indicesAdmin().prepareFlush().setForce(true).execute().actionGet()); // we have to flush at least once here since we don't corrupt the translog - assertHitCount(client().prepareSearch().setSize(0), numDocs); + assertHitCount(prepareSearch().setSize(0), numDocs); // disable allocations of replicas post restart (the restart will change replicas to primaries, so we have // to capture replicas post restart) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java index 8731c319043a8..b8ecbc2e750af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedTranslogIT.java @@ -97,8 +97,7 @@ public void onAllNodesStopped() throws Exception { }); assertThat( - expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch("test").setQuery(matchAllQuery()).get()) - .getMessage(), + expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch("test").setQuery(matchAllQuery()).get()).getMessage(), containsString("all shards failed") ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 8b1c44703569f..19efcd9e3f31f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -39,7 +39,7 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -74,20 +74,17 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I logger.info("unlucky node: {}", unluckyNode.getNode()); // create a 
transport service that throws a ConnectTransportException for one bulk request and therefore triggers a retry. for (NodeStats dataNode : nodeStats.getNodes()) { - MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance( - TransportService.class, - dataNode.getNode().getName() - )); - mockTransportService.addSendBehavior( - internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), - (connection, requestId, action, request, options) -> { - connection.sendRequest(requestId, action, request, options); - if (action.equals(TransportShardBulkAction.ACTION_NAME) && exceptionThrown.compareAndSet(false, true)) { - logger.debug("Throw ConnectTransportException"); - throw new ConnectTransportException(connection.getNode(), action); + MockTransportService.getInstance(dataNode.getNode().getName()) + .addSendBehavior( + internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), + (connection, requestId, action, request, options) -> { + connection.sendRequest(requestId, action, request, options); + if (action.equals(TransportShardBulkAction.ACTION_NAME) && exceptionThrown.compareAndSet(false, true)) { + logger.debug("Throw ConnectTransportException"); + throw new ConnectTransportException(connection.getNode(), action); + } } - } - ); + ); } BulkRequestBuilder bulkBuilder = client.prepareBulk(); @@ -107,7 +104,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I } refresh(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get(); + SearchResponse searchResponse = prepareSearch("index").setSize(numDocs * 2).addStoredField("_id").get(); Set<String> uniqueIds = new HashSet<>(); long dupCounter = 0; @@ -115,10 +112,9 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { if (uniqueIds.add(searchResponse.getHits().getHits()[i].getId()) == false) { if (found_duplicate_already == false) { - SearchResponse dupIdResponse = client().prepareSearch("index") - .setQuery(termQuery("_id", searchResponse.getHits().getHits()[i].getId())) - .setExplain(true) - .get(); + SearchResponse dupIdResponse = prepareSearch("index").setQuery( + termQuery("_id", searchResponse.getHits().getHits()[i].getId()) + ).setExplain(true).get(); assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { @@ -130,7 +126,7 @@ public void testRetryDueToExceptionOnNetworkLayer() throws ExecutionException, I dupCounter++; } } - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(dupCounter, equalTo(0L)); assertHitCount(searchResponse, numDocs); IndicesStatsResponse index = indicesAdmin().prepareStats("index").clear().setSegments(true).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index 6c1b9a56f04cf..e3c66f3dabfdf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -55,7 +55,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { for (int j = 0; j < numOfChecks; j++) { try { logger.debug("running search with all types"); - SearchResponse response =
client().prepareSearch("test").get(); + SearchResponse response = prepareSearch("test").get(); if (response.getHits().getTotalHits().value != numOfDocs) { final String message = "Count is " + response.getHits().getTotalHits().value @@ -74,7 +74,7 @@ public void testAutoGenerateIdNoDuplicates() throws Exception { } try { logger.debug("running search with a specific type"); - SearchResponse response = client().prepareSearch("test").get(); + SearchResponse response = prepareSearch("test").get(); if (response.getHits().getTotalHits().value != numOfDocs) { final String message = "Count is " + response.getHits().getTotalHits().value diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java index cbe7af4d05d98..211e34c99ec23 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/DateMathIndexExpressionsIntegrationIT.java @@ -87,7 +87,7 @@ public void testIndexNameDateMathExpressions() { client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); - SearchResponse searchResponse = dateSensitiveGet(client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3)); + SearchResponse searchResponse = dateSensitiveGet(prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3)); assertHitCount(searchResponse, 3); assertSearchHits(searchResponse, "1", "2", "3"); @@ -144,7 +144,7 @@ public void testAutoCreateIndexWithDateMathExpression() { client().prepareIndex(dateMathExp3).setId("3").setSource("{}", XContentType.JSON).get(); refresh(); - SearchResponse searchResponse = dateSensitiveGet(client().prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3)); + SearchResponse searchResponse = dateSensitiveGet(prepareSearch(dateMathExp1, dateMathExp2, dateMathExp3)); assertHitCount(searchResponse, 3); assertSearchHits(searchResponse, "1", "2", "3"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java index 9f5138a1a0d3c..7bedd163c2530 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesOptionsIntegrationIT.java @@ -395,26 +395,20 @@ public void testWildcardBehaviourSnapshotRestore() throws Exception { public void testAllMissingLenient() throws Exception { createIndex("test1"); client().prepareIndex("test1").setId("1").setSource("k", "v").setRefreshPolicy(IMMEDIATE).get(); - assertHitCount(client().prepareSearch("test2").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 0L); - assertHitCount( - client().prepareSearch("test2", "test3").setQuery(matchAllQuery()).setIndicesOptions(IndicesOptions.lenientExpandOpen()), - 0L - ); + assertHitCount(prepareSearch("test2").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 0L); + assertHitCount(prepareSearch("test2", "test3").setQuery(matchAllQuery()).setIndicesOptions(IndicesOptions.lenientExpandOpen()), 0L); // you should still be able to run empty searches without things blowing up - assertHitCount(client().prepareSearch().setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 
1L); + assertHitCount(prepareSearch().setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(matchAllQuery()), 1L); } public void testAllMissingStrict() throws Exception { createIndex("test1"); - expectThrows(IndexNotFoundException.class, () -> client().prepareSearch("test2").setQuery(matchAllQuery()).execute().actionGet()); + expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2").setQuery(matchAllQuery()).execute().actionGet()); - expectThrows( - IndexNotFoundException.class, - () -> client().prepareSearch("test2", "test3").setQuery(matchAllQuery()).execute().actionGet() - ); + expectThrows(IndexNotFoundException.class, () -> prepareSearch("test2", "test3").setQuery(matchAllQuery()).execute().actionGet()); // you should still be able to run empty searches without things blowing up - client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); + prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); } // For now don't handle closed indices @@ -606,7 +600,7 @@ public void testUpdateSettings() throws Exception { } static SearchRequestBuilder search(String... indices) { - return client().prepareSearch(indices).setQuery(matchAllQuery()); + return prepareSearch(indices).setQuery(matchAllQuery()); } static MultiSearchRequestBuilder msearch(IndicesOptions options, String... indices) { @@ -614,7 +608,7 @@ static MultiSearchRequestBuilder msearch(IndicesOptions options, String... indic if (options != null) { multiSearchRequestBuilder.setIndicesOptions(options); } - return multiSearchRequestBuilder.add(client().prepareSearch(indices).setQuery(matchAllQuery())); + return multiSearchRequestBuilder.add(prepareSearch(indices).setQuery(matchAllQuery())); } static ClearIndicesCacheRequestBuilder clearCache(String... indices) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 8ad81104eab2f..b10d4147af25c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -35,7 +35,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -48,7 +48,6 @@ public void testCacheAggs() throws Exception { indicesAdmin().prepareCreate("index") .setMapping("f", "type=date") .setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); indexRandom( true, @@ -67,7 +66,7 @@ public void testCacheAggs() throws Exception { dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0).calendarInterval(DateHistogramInterval.MONTH) ) .get(); - assertSearchResponse(r1); + assertNoFailures(r1); // The cached is actually used assertThat( @@ -86,7 +85,7 @@ public void testCacheAggs() throws Exception { .calendarInterval(DateHistogramInterval.MONTH) ) .get(); - assertSearchResponse(r2); + assertNoFailures(r2); Histogram h1 = r1.getAggregations().get("histo"); 
Histogram h2 = r2.getAggregations().get("histo"); final List<? extends Histogram.Bucket> buckets1 = h1.getBuckets(); @@ -110,7 +109,6 @@ public void testQueryRewrite() throws Exception { indexSettings(5, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) .put("index.number_of_routing_shards", 5) ) - .get() ); indexRandom( true, @@ -173,7 +171,6 @@ public void testQueryRewriteMissingValues() throws Exception { indicesAdmin().prepareCreate("index") .setMapping("s", "type=date") .setSettings(indexSettings(1, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); indexRandom( true, @@ -232,7 +229,6 @@ public void testQueryRewriteDates() throws Exception { indicesAdmin().prepareCreate("index") .setMapping("d", "type=date") .setSettings(indexSettings(1, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); indexRandom( true, @@ -470,7 +466,6 @@ public void testCacheWithFilteredAlias() { .setMapping("created_at", "type=date") .setSettings(settings) .addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d"))) - .get() ); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); client.prepareIndex("index").setId("1").setRouting("1").setSource("created_at", DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); @@ -516,7 +511,6 @@ public void testProfileDisableCache() throws Exception { indicesAdmin().prepareCreate("index") .setMapping("k", "type=keyword") .setSettings(indexSettings(1, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); indexRandom(true, client.prepareIndex("index").setSource("k", "hello")); ensureSearchable("index"); @@ -530,7 +524,7 @@ public void testProfileDisableCache() throws Exception { .setProfile(profile) .setQuery(QueryBuilders.termQuery("k", "hello")) .get(); - assertSearchResponse(resp); + assertNoFailures(resp); ElasticsearchAssertions.assertAllSuccessful(resp); assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); if (profile == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java index ce38fed6a7069..7541bce29fbe9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/ConcurrentDynamicTemplateIT.java @@ -77,8 +77,8 @@ public void onFailure(Exception e) { latch.await(); assertThat(throwable, emptyIterable()); refresh(); - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test-user")), numDocs); - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test user")), 0); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test-user")), numDocs); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.matchQuery(fieldName, "test user")), 0); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java index 6e2b989c6da8a..7a9aa7a47215a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java +++
b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/MalformedDynamicTemplateIT.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -54,12 +55,12 @@ public void testBWCMalformedDynamicTemplate() { Settings.builder() .put(indexSettings()) .put("number_of_shards", 1) - .put("index.version.created", IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0)) - ).setMapping(mapping).get() + .put("index.version.created", IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)) + ).setMapping(mapping) ); client().prepareIndex(indexName).setSource("{\"foo\" : \"bar\"}", XContentType.JSON).get(); assertNoFailures((indicesAdmin().prepareRefresh(indexName)).get()); - assertHitCount(client().prepareSearch(indexName), 1); + assertHitCount(prepareSearch(indexName), 1); MapperParsingException ex = expectThrows( MapperParsingException.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 5e1e81945c177..0eca3d689903e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -86,7 +86,7 @@ public void testDynamicUpdates() throws Exception { logger.info("checking all the documents are there"); RefreshResponse refreshResponse = indicesAdmin().prepareRefresh().execute().actionGet(); assertThat(refreshResponse.getFailedShards(), equalTo(0)); - SearchResponse response = client().prepareSearch("test").setSize(0).execute().actionGet(); + SearchResponse response = prepareSearch("test").setSize(0).execute().actionGet(); assertThat(response.getHits().getTotalHits().value, equalTo((long) recCount)); logger.info("checking all the fields are in the mappings"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 1e8f5b7f1f5e5..5958f1ad57932 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -152,7 +152,7 @@ public void testBreakerWithRandomExceptions() throws IOException, InterruptedExc } for (int i = 0; i < numSearches; i++) { - SearchRequestBuilder searchRequestBuilder = client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()); + SearchRequestBuilder searchRequestBuilder = prepareSearch().setQuery(QueryBuilders.matchAllQuery()); if (random().nextBoolean()) { searchRequestBuilder.addSort("test-str", SortOrder.ASC); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java index f82ea1d893393..cdd77d5864a7b 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -105,10 +105,9 @@ public void run() { finished.set(true); indexingThread.join(); refresh("test"); - ElasticsearchAssertions.assertHitCount(client().prepareSearch("test").setTrackTotalHits(true), numAutoGenDocs.get()); + ElasticsearchAssertions.assertHitCount(prepareSearch("test").setTrackTotalHits(true), numAutoGenDocs.get()); ElasticsearchAssertions.assertHitCount( - client().prepareSearch("test") - .setTrackTotalHits(true)// extra paranoia ;) + prepareSearch("test").setTrackTotalHits(true)// extra paranoia ;) .setQuery(QueryBuilders.termQuery("auto", true)), numAutoGenDocs.get() ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index e44db1d86619a..8fe047e6cdc0a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -108,6 +108,7 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.BackgroundIndexer; @@ -394,7 +395,7 @@ public void testReplicaRecovery() throws Exception { } refresh(INDEX_NAME); - assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numOfDocs); + assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numOfDocs); final boolean closedIndex = randomBoolean(); if (closedIndex) { @@ -439,7 +440,7 @@ public void testReplicaRecovery() throws Exception { if (closedIndex) { assertAcked(indicesAdmin().prepareOpen(INDEX_NAME)); } - assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0), numOfDocs); + assertHitCount(prepareSearch(INDEX_NAME).setSize(0), numOfDocs); } public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exception { @@ -486,7 +487,7 @@ public void testCancelNewShardRecoveryAndUsesExistingShardCopy() throws Exceptio // hold peer recovery on phase 2 after nodeB down CountDownLatch phase1ReadyBlocked = new CountDownLatch(1); CountDownLatch allowToCompletePhase1Latch = new CountDownLatch(1); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, nodeA); + final var transportService = MockTransportService.getInstance(nodeA); transportService.addSendBehavior((connection, requestId, action, request, options) -> { if (PeerRecoveryTargetService.Actions.CLEAN_FILES.equals(action)) { phase1ReadyBlocked.countDown(); @@ -868,7 +869,7 @@ public void testSnapshotRecovery() throws Exception { RecoveryResponse response = indicesAdmin().prepareRecoveries(INDEX_NAME).execute().actionGet(); Repository repository = internalCluster().getAnyMasterNodeInstance(RepositoriesService.class).repository(REPO_NAME); - final RepositoryData repositoryData = PlainActionFuture.get(repository::getRepositoryData); + final RepositoryData repositoryData = AbstractSnapshotIntegTestCase.getRepositoryData(repository); for (Map.Entry<String, List<RecoveryState>> indexRecoveryStates : response.shardRecoveryStates().entrySet()) {
assertThat(indexRecoveryStates.getKey(), equalTo(INDEX_NAME)); @@ -925,7 +926,7 @@ private IndicesStatsResponse createAndPopulateIndex(String name, int nodeCount, indexRandom(true, docs); flush(); - assertThat(client().prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(prepareSearch(name).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs)); return indicesAdmin().prepareStats(name).execute().actionGet(); } @@ -1046,11 +1047,7 @@ public void testDoNotInfinitelyWaitForMapping() { } Semaphore recoveryBlocked = new Semaphore(1); for (DiscoveryNode node : clusterService().state().nodes()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - node.getName() - ); - transportService.addSendBehavior((connection, requestId, action, request, options) -> { + MockTransportService.getInstance(node.getName()).addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoverySourceService.Actions.START_RECOVERY)) { if (recoveryBlocked.tryAcquire()) { PluginsService pluginService = internalCluster().getInstance(PluginsService.class, node.getName()); @@ -1076,7 +1073,7 @@ public void testDoNotInfinitelyWaitForMapping() { } indicesAdmin().prepareRefresh("test").get(); - assertHitCount(client().prepareSearch(), numDocs); + assertHitCount(prepareSearch(), numDocs); } /** Makes sure the new master does not repeatedly fetch index metadata from recovering replicas */ @@ -1088,11 +1085,10 @@ public void testOngoingRecoveryAndMasterFailOver() throws Exception { indicesAdmin().prepareCreate(indexName) .setSettings(indexSettings(1, 0).put("index.routing.allocation.include._name", nodeWithPrimary)) ); - MockTransportService transport = (MockTransportService) internalCluster().getInstance(TransportService.class, nodeWithPrimary); CountDownLatch phase1ReadyBlocked = new CountDownLatch(1); CountDownLatch allowToCompletePhase1Latch = new CountDownLatch(1); Semaphore blockRecovery = new Semaphore(1); - transport.addSendBehavior((connection, requestId, action, request, options) -> { + MockTransportService.getInstance(nodeWithPrimary).addSendBehavior((connection, requestId, action, request, options) -> { if (PeerRecoveryTargetService.Actions.CLEAN_FILES.equals(action) && blockRecovery.tryAcquire()) { phase1ReadyBlocked.countDown(); safeAwait(allowToCompletePhase1Latch); @@ -1159,8 +1155,7 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { // first try because the local recovery happens once and its stats is reset when the recovery fails. 
SetOnce<Long> localRecoveredOps = new SetOnce<>(); for (String node : nodes) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.addSendBehavior((connection, requestId, action, request, options) -> { + MockTransportService.getInstance(node).addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoverySourceService.Actions.START_RECOVERY)) { final RecoveryState recoveryState = internalCluster().getInstance(IndicesService.class, failingNode) .getShardOrNull(new ShardId(resolveIndex(indexName), 0)) @@ -1223,8 +1218,7 @@ public void testRecoverLocallyUpToGlobalCheckpoint() throws Exception { } } for (String node : nodes) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.clearAllRules(); + MockTransportService.getInstance(node).clearAllRules(); } } @@ -1641,7 +1635,6 @@ public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() throws Exception { assertAcked( indicesAdmin().prepareCreate(indexName) .setSettings(indexSettings(1, 1).put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), randomBoolean())) - .get() ); final List<IndexRequestBuilder> indexRequests = IntStream.range(0, between(10, 500)) .mapToObj(n -> client().prepareIndex(indexName).setSource("foo", "bar")) @@ -1651,9 +1644,7 @@ public void testAllocateEmptyPrimaryResetsGlobalCheckpoint() throws Exception { internalCluster().stopRandomDataNode(); internalCluster().stopRandomDataNode(); final String nodeWithoutData = internalCluster().startDataOnlyNode(); - assertAcked( - clusterAdmin().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeWithoutData, true)).get() - ); + assertAcked(clusterAdmin().prepareReroute().add(new AllocateEmptyPrimaryAllocationCommand(indexName, 0, nodeWithoutData, true))); internalCluster().startDataOnlyNode(randomNodeDataPathSettings); ensureGreen(); for (ShardStats shardStats : indicesAdmin().prepareStats(indexName).get().getIndex(indexName).getShards()) { @@ -1672,10 +1663,7 @@ public void testPeerRecoveryTrimsLocalTranslog() throws Exception { ClusterState clusterState = clusterAdmin().prepareState().get().getState(); DiscoveryNode nodeWithOldPrimary = clusterState.nodes() .get(clusterState.routingTable().index(indexName).shard(0).primaryShard().currentNodeId()); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithOldPrimary.getName() - ); + final var transportService = MockTransportService.getInstance(nodeWithOldPrimary.getName()); CountDownLatch readyToRestartNode = new CountDownLatch(1); AtomicBoolean stopped = new AtomicBoolean(); transportService.addSendBehavior((connection, requestId, action, request, options) -> { @@ -1748,14 +1736,10 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() throws Exception { ClusterState clusterState = clusterAdmin().prepareState().get().getState(); DiscoveryNode nodeWithPrimary = clusterState.nodes() .get(clusterState.routingTable().index(indexName).shard(0).primaryShard().currentNodeId()); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - nodeWithPrimary.getName() - ); final AtomicBoolean fileInfoIntercepted = new AtomicBoolean(); final AtomicBoolean fileChunkIntercepted = new AtomicBoolean(); - transportService.addSendBehavior((connection, requestId, action, request,
options) -> { + MockTransportService.getInstance(nodeWithPrimary.getName()).addSendBehavior((connection, requestId, action, request, options) -> { if (action.equals(PeerRecoveryTargetService.Actions.FILES_INFO)) { if (fileInfoIntercepted.compareAndSet(false, true)) { final NodeIndicesStats nodeIndicesStats = clusterAdmin().prepareNodesStats(connection.getNode().getId()) @@ -1828,10 +1812,7 @@ public void testWaitForClusterStateToBeAppliedOnSourceNode() throws Exception { final long initialClusterStateVersion = clusterService().state().version(); try (var recoveryClusterStateDelayListeners = new RecoveryClusterStateDelayListeners(initialClusterStateVersion)) { - final var primaryNodeTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - primaryNode - ); + final var primaryNodeTransportService = MockTransportService.getInstance(primaryNode); primaryNodeTransportService.addRequestHandlingBehavior( Coordinator.COMMIT_STATE_ACTION_NAME, (handler, request, channel, task) -> { @@ -2009,7 +1990,6 @@ private void createRepository(boolean enableSnapshotPeerRecoveries) { .put(BlobStoreRepository.USE_FOR_PEER_RECOVERY_SETTING.getKey(), enableSnapshotPeerRecoveries) .put("compress", false) ) - .get() ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/ReplicaToPrimaryPromotionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/ReplicaToPrimaryPromotionIT.java index 67b885e315ae9..8595f11bae428 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/ReplicaToPrimaryPromotionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/ReplicaToPrimaryPromotionIT.java @@ -43,7 +43,7 @@ public void testPromoteReplicaToPrimary() throws Exception { refresh(indexName); } - assertHitCount(client().prepareSearch(indexName).setSize(0), numOfDocs); + assertHitCount(prepareSearch(indexName).setSize(0), numOfDocs); ensureGreen(indexName); // sometimes test with a closed index @@ -76,6 +76,6 @@ public void testPromoteReplicaToPrimary() throws Exception { assertAcked(indicesAdmin().prepareOpen(indexName)); ensureYellowAndNoInitializingShards(indexName); } - assertHitCount(client().prepareSearch(indexName).setSize(0), numOfDocs); + assertHitCount(prepareSearch(indexName).setSize(0), numOfDocs); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsServiceIT.java index ea850fb1d0007..e34d5059b4991 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/plan/ShardSnapshotsServiceIT.java @@ -49,6 +49,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.concurrent.Executor; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; @@ -104,12 +105,12 @@ public FailingRepo( } @Override - public void getRepositoryData(ActionListener listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { if (failGetRepositoryData) { listener.onFailure(new IOException("Failure getting repository data")); return; } - super.getRepositoryData(listener); + super.getRepositoryData(responseExecutor, listener); } @Override diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java index 4631a4fb7cf41..b3e0d258cb113 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateNumberOfReplicasIT.java @@ -62,7 +62,7 @@ public void testSimpleUpdateNumberOfReplicas() throws Exception { refresh(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 10L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 10L); } final long settingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); @@ -118,7 +118,7 @@ public void testSimpleUpdateNumberOfReplicas() throws Exception { assertThat(clusterHealth.getIndices().get("test").getActiveShards(), equalTo(numShards.numPrimaries * 3)); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 10L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 10L); } logger.info("Decreasing number of replicas from 2 to 0"); @@ -141,7 +141,7 @@ public void testSimpleUpdateNumberOfReplicas() throws Exception { assertThat(clusterHealth.getIndices().get("test").getActiveShards(), equalTo(numShards.numPrimaries)); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 10); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 10); } final long afterReplicaDecreaseSettingsVersion = clusterAdmin().prepareState() @@ -466,7 +466,6 @@ public void testUpdateNumberOfReplicasAllowNoIndices() { indicesAdmin().prepareUpdateSettings("non-existent-*") .setSettings(Settings.builder().put("index.number_of_replicas", 1)) .setIndicesOptions(options) - .get() ); final int numberOfReplicas = Integer.parseInt( indicesAdmin().prepareGetSettings("test-index").get().getSetting("test-index", "index.number_of_replicas") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index c1851eb9924c8..e770127bf577c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -468,7 +468,7 @@ public void testSettingsVersion() { { final long settingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.refresh_interval", "500ms")).get() + indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.refresh_interval", "500ms")) ); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(1 + settingsVersion)); @@ -476,14 +476,10 @@ public void testSettingsVersion() { { final boolean block = randomBoolean(); - assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block)).get() - ); + 
assertAcked(indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block))); final long settingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertAcked( - indicesAdmin().prepareUpdateSettings("test") - .setSettings(Settings.builder().put("index.blocks.read_only", block == false)) - .get() + indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block == false)) ); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(1 + settingsVersion)); @@ -491,7 +487,7 @@ public void testSettingsVersion() { // if the read-only block is present, remove it if (block == false) { assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", false)).get() + indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", false)) ); } } @@ -505,9 +501,7 @@ public void testSettingsVersionUnchanged() { final long settingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); final String refreshInterval = indicesAdmin().prepareGetSettings("test").get().getSetting("test", "index.refresh_interval"); assertAcked( - indicesAdmin().prepareUpdateSettings("test") - .setSettings(Settings.builder().put("index.refresh_interval", refreshInterval)) - .get() + indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.refresh_interval", refreshInterval)) ); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(settingsVersion)); @@ -515,21 +509,17 @@ public void testSettingsVersionUnchanged() { { final boolean block = randomBoolean(); - assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block)).get() - ); + assertAcked(indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block))); // now put the same block again final long settingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); - assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block)).get() - ); + assertAcked(indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", block))); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(settingsVersion)); // if the read-only block is present, remove it if (block) { assertAcked( - indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", false)).get() + indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.blocks.read_only", false)) ); } } @@ -548,9 +538,7 @@ public void testNumberOfReplicasSettingsVersionUnchanged() { indicesAdmin().prepareGetSettings("test").get().getSetting("test", "index.number_of_replicas") ); assertAcked( - indicesAdmin().prepareUpdateSettings("test") - .setSettings(Settings.builder().put("index.number_of_replicas", numberOfReplicas)) - .get() + 
indicesAdmin().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", numberOfReplicas)) ); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(settingsVersion)); @@ -571,7 +559,6 @@ public void testNumberOfReplicasSettingsVersion() { assertAcked( indicesAdmin().prepareUpdateSettings("test") .setSettings(Settings.builder().put("index.number_of_replicas", 1 + numberOfReplicas)) - .get() ); final long newSettingsVersion = clusterAdmin().prepareState().get().getState().metadata().index("test").getSettingsVersion(); assertThat(newSettingsVersion, equalTo(1 + settingsVersion)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java index fd9bfdd9e0138..91425067bd817 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseIndexIT.java @@ -132,7 +132,7 @@ public void testCloseIndex() throws Exception { assertIndexIsClosed(indexName); assertAcked(indicesAdmin().prepareOpen(indexName)); - assertHitCount(client().prepareSearch(indexName).setSize(0), nbDocs); + assertHitCount(prepareSearch(indexName).setSize(0), nbDocs); } public void testCloseAlreadyClosedIndex() throws Exception { @@ -244,7 +244,7 @@ public void testCloseWhileIndexingDocuments() throws Exception { assertIndexIsClosed(indexName); assertAcked(indicesAdmin().prepareOpen(indexName)); - assertHitCount(client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs); + assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), nbDocs); } public void testCloseWhileDeletingIndices() throws Exception { @@ -348,10 +348,7 @@ public void testConcurrentClosesAndOpens() throws Exception { } refresh(indexName); assertIndexIsOpened(indexName); - assertHitCount( - client().prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), - indexer.totalIndexedDocs() - ); + assertHitCount(prepareSearch(indexName).setSize(0).setTrackTotalHitsUpTo(TRACK_TOTAL_HITS_ACCURATE), indexer.totalIndexedDocs()); } public void testCloseIndexWaitForActiveShards() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index b67098c8f37a7..53d3e62109536 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -31,7 +31,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.test.transport.StubbableTransport; -import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.Collection; @@ -177,15 +176,11 @@ public void testCloseWhileRelocatingShards() throws Exception { connection.sendRequest(requestId, action, request, options); }; - final MockTransportService targetTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - targetNode - ); + final var 
targetTransportService = MockTransportService.getInstance(targetNode); for (DiscoveryNode node : state.getNodes()) { if (node.canContainData() && node.getName().equals(targetNode) == false) { - final TransportService sourceTransportService = internalCluster().getInstance(TransportService.class, node.getName()); - targetTransportService.addSendBehavior(sourceTransportService, sendBehavior); + targetTransportService.addSendBehavior(MockTransportService.getInstance(node.getName()), sendBehavior); } } @@ -246,7 +241,7 @@ public void testCloseWhileRelocatingShards() throws Exception { ensureGreen(indices); for (String index : acknowledgedCloses) { - long docsCount = client().prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value; + long docsCount = prepareSearch(index).setSize(0).setTrackTotalHits(true).get().getHits().getTotalHits().value; assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index 70ef73862016a..1ce0c0985b704 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ -296,7 +296,7 @@ public void testOpenCloseWithDocs() throws IOException, ExecutionException, Inte // check the index still contains the records that we indexed indicesAdmin().prepareOpen("test").execute().get(); ensureGreen(); - SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get(); + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, docs); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java index 9c12c00ae76cd..3c16e0f2624ed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/ReopenWhileClosingIT.java @@ -127,10 +127,7 @@ private void createIndexWithDocs(final String indexName, final Collection release = new ListenableFuture<>(); for (DiscoveryNode node : internalCluster().clusterService().state().getNodes()) { mockTransportService.addSendBehavior( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java index e2ddb7c7b9957..9b763ea581187 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/SimpleIndexStateIT.java @@ -12,8 +12,6 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.open.OpenIndexResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.health.ClusterHealthStatus; import 
org.elasticsearch.cluster.metadata.IndexMetadata; @@ -67,8 +65,7 @@ public void testSimpleOpenClose() { } logger.info("--> opening index..."); - OpenIndexResponse openIndexResponse = indicesAdmin().prepareOpen("test").get(); - assertThat(openIndexResponse.isAcknowledged(), equalTo(true)); + assertAcked(indicesAdmin().prepareOpen("test")); logger.info("--> waiting for green status"); ensureGreen(); @@ -139,9 +136,6 @@ public void testConsistencyAfterIndexCreationFailure() { } logger.info("--> creating test index with valid settings "); - CreateIndexResponse response = indicesAdmin().prepareCreate("test") - .setSettings(Settings.builder().put("number_of_shards", 1)) - .get(); - assertThat(response.isAcknowledged(), equalTo(true)); + assertAcked(indicesAdmin().prepareCreate("test").setSettings(Settings.builder().put("number_of_shards", 1))); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 494d9c4b0a6f3..a5a9ca2862a0e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -131,7 +131,6 @@ public void testFieldDataStats() { indicesAdmin().prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_shards", 2)) .setMapping("field", "type=text,fielddata=true", "field2", "type=text,fielddata=true") - .get() ); ensureGreen(); client().prepareIndex("test").setId("1").setSource("field", "value1", "field2", "value1").execute().actionGet(); @@ -151,8 +150,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data... - client().prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet(); - client().prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet(); + prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet(); + prepareSearch().addSort("field", SortOrder.ASC).execute().actionGet(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); assertThat( @@ -167,8 +166,8 @@ public void testFieldDataStats() { assertThat(indicesStats.getTotal().getFieldData().getMemorySizeInBytes(), greaterThan(0L)); // sort to load it to field data... 
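Another mechanical rewrite running through these files replaces `client().prepareSearch(...)` with a bare `prepareSearch(...)`. The diff never shows the new method itself; the sketch below assumes it is a convenience on the shared test base class that simply delegates to the node client.

    import org.elasticsearch.action.search.SearchRequestBuilder;

    // Assumed shape of the base-class helper (not visible in this diff):
    // delegate straight to client(), so call sites drop the client() prefix.
    public static SearchRequestBuilder prepareSearch(String... indices) {
        return client().prepareSearch(indices);
    }

Under that assumption every rewrite here is one-for-one, e.g. client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) becomes prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0) with no behavioral change, which is why so many multi-line builder chains in the hunks below also fold back onto a single line.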
- client().prepareSearch().addSort("field2", SortOrder.ASC).execute().actionGet(); - client().prepareSearch().addSort("field2", SortOrder.ASC).execute().actionGet(); + prepareSearch().addSort("field2", SortOrder.ASC).execute().actionGet(); + prepareSearch().addSort("field2", SortOrder.ASC).execute().actionGet(); // now check the per field stats nodesStats = clusterAdmin().prepareNodesStats("data:true") @@ -236,7 +235,6 @@ public void testClearAllCaches() throws Exception { indicesAdmin().prepareCreate("test") .setSettings(settingsBuilder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) .setMapping("field", "type=text,fielddata=true") - .get() ); ensureGreen(); clusterAdmin().prepareHealth().setWaitForGreenStatus().execute().actionGet(); @@ -272,16 +270,8 @@ public void testClearAllCaches() throws Exception { assertThat(indicesStats.getTotal().getQueryCache().getMemorySizeInBytes(), equalTo(0L)); // sort to load it to field data and filter to load filter cache - client().prepareSearch() - .setPostFilter(QueryBuilders.termQuery("field", "value1")) - .addSort("field", SortOrder.ASC) - .execute() - .actionGet(); - client().prepareSearch() - .setPostFilter(QueryBuilders.termQuery("field", "value2")) - .addSort("field", SortOrder.ASC) - .execute() - .actionGet(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value1")).addSort("field", SortOrder.ASC).execute().actionGet(); + prepareSearch().setPostFilter(QueryBuilders.termQuery("field", "value2")).addSort("field", SortOrder.ASC).execute().actionGet(); nodesStats = clusterAdmin().prepareNodesStats("data:true").setIndices(true).execute().actionGet(); assertThat( @@ -334,7 +324,6 @@ public void testQueryCache() throws Exception { assertAcked( indicesAdmin().prepareCreate("idx") .setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); ensureGreen(); @@ -374,7 +363,7 @@ public void testQueryCache() throws Exception { assertThat(indicesAdmin().prepareStats("idx").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(), equalTo(0L)); for (int i = 0; i < 10; i++) { assertThat( - client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs) ); assertThat( @@ -409,7 +398,7 @@ public void testQueryCache() throws Exception { for (int i = 0; i < 10; i++) { assertThat( - client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs) ); assertThat( @@ -427,8 +416,7 @@ public void testQueryCache() throws Exception { // test explicit request parameter assertThat( - client().prepareSearch("idx") - .setSearchType(SearchType.QUERY_THEN_FETCH) + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setRequestCache(false) .get() @@ -442,8 +430,7 @@ public void testQueryCache() throws Exception { ); assertThat( - client().prepareSearch("idx") - .setSearchType(SearchType.QUERY_THEN_FETCH) + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setRequestCache(true) .get() @@ -462,7 +449,7 @@ public void testQueryCache() throws Exception { 
updateIndexSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), false), "idx"); assertThat( - client().prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0).get().getHits().getTotalHits().value, equalTo((long) numDocs) ); assertThat( @@ -471,8 +458,7 @@ public void testQueryCache() throws Exception { ); assertThat( - client().prepareSearch("idx") - .setSearchType(SearchType.QUERY_THEN_FETCH) + prepareSearch("idx").setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setRequestCache(true) .get() @@ -1013,7 +999,7 @@ public void testGroupsParam() throws Exception { client().prepareIndex("test1").setId(Integer.toString(1)).setSource("foo", "bar").execute().actionGet(); refresh(); - client().prepareSearch("_all").setStats("bar", "baz").execute().actionGet(); + prepareSearch("_all").setStats("bar", "baz").execute().actionGet(); IndicesStatsRequestBuilder builder = indicesAdmin().prepareStats(); IndicesStatsResponse stats = builder.execute().actionGet(); @@ -1163,9 +1149,7 @@ public void testFilterCacheStats() throws Exception { // the query cache has an optimization that disables it automatically if there is contention, // so we run it in an assertBusy block which should eventually succeed assertBusy(() -> { - assertNoFailures( - client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))) - ); + assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))); IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(stats); assertThat(stats.getTotal().queryCache.getHitCount(), equalTo(0L)); @@ -1174,9 +1158,7 @@ public void testFilterCacheStats() throws Exception { }); assertBusy(() -> { - assertNoFailures( - client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))) - ); + assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))); IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(stats); assertThat(stats.getTotal().queryCache.getHitCount(), greaterThan(0L)); @@ -1224,9 +1206,7 @@ public void testFilterCacheStats() throws Exception { ); assertBusy(() -> { - assertNoFailures( - client().prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz"))) - ); + assertNoFailures(prepareSearch("index").setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.matchQuery("foo", "baz")))); IndicesStatsResponse stats = indicesAdmin().prepareStats("index").setQueryCache(true).get(); assertCumulativeQueryCacheStats(stats); assertThat(stats.getTotal().queryCache.getHitCount(), greaterThan(0L)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index be441c5439fbc..ca749eeaef545 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -42,7 +42,6 @@ import 
org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportMessageListener; -import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.nio.file.Files; @@ -155,11 +154,10 @@ public static BlockClusterStateProcessing relocateAndBlockCompletion( ) throws InterruptedException { BlockClusterStateProcessing disruption = new BlockClusterStateProcessing(nodeTo, random()); internalCluster().setDisruptionScheme(disruption); - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, nodeTo); CountDownLatch beginRelocationLatch = new CountDownLatch(1); CountDownLatch receivedShardExistsRequestLatch = new CountDownLatch(1); // use a tracer on the target node to track relocation start and end - transportService.addMessageListener(new TransportMessageListener() { + MockTransportService.getInstance(nodeTo).addMessageListener(new TransportMessageListener() { @Override public void onRequestReceived(long requestId, String action) { if (action.equals(PeerRecoveryTargetService.Actions.FILES_INFO)) { @@ -213,17 +211,16 @@ public void testShardCleanupIfShardDeletionAfterRelocationFailedAndIndexDeleted( // add a transport delegate that will prevent the shard active request to succeed the first time after relocation has finished. // node_1 will then wait for the next cluster state change before it tries a next attempt to delete the shard. - MockTransportService transportServiceNode_1 = (MockTransportService) internalCluster().getInstance(TransportService.class, node_1); - TransportService transportServiceNode_2 = internalCluster().getInstance(TransportService.class, node_2); final CountDownLatch shardActiveRequestSent = new CountDownLatch(1); - transportServiceNode_1.addSendBehavior(transportServiceNode_2, (connection, requestId, action, request, options) -> { - if (action.equals("internal:index/shard/exists") && shardActiveRequestSent.getCount() > 0) { - shardActiveRequestSent.countDown(); - logger.info("prevent shard active request from being sent"); - throw new ConnectTransportException(connection.getNode(), "DISCONNECT: simulated"); - } - connection.sendRequest(requestId, action, request, options); - }); + MockTransportService.getInstance(node_1) + .addSendBehavior(MockTransportService.getInstance(node_2), (connection, requestId, action, request, options) -> { + if (action.equals("internal:index/shard/exists") && shardActiveRequestSent.getCount() > 0) { + shardActiveRequestSent.countDown(); + logger.info("prevent shard active request from being sent"); + throw new ConnectTransportException(connection.getNode(), "DISCONNECT: simulated"); + } + connection.sendRequest(requestId, action, request, options); + }); logger.info("--> move shard from {} to {}, and wait for relocation to finish", node_1, node_2); internalCluster().client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, node_1, node_2)).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 76e3ad8ef74d4..359b90a351b60 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -152,8 +152,7 @@ public void 
testSimpleIndexTemplateTests() throws Exception { client().prepareIndex("test_index").setId("1").setSource("field1", "value1", "field2", "value 2").setRefreshPolicy(IMMEDIATE).get(); ensureGreen(); - SearchResponse searchResponse = client().prepareSearch("test_index") - .setQuery(termQuery("field1", "value1")) + SearchResponse searchResponse = prepareSearch("test_index").setQuery(termQuery("field1", "value1")) .addStoredField("field1") .addStoredField("field2") .execute() @@ -168,8 +167,7 @@ public void testSimpleIndexTemplateTests() throws Exception { ensureGreen(); // now only match on one template (template_1) - searchResponse = client().prepareSearch("text_index") - .setQuery(termQuery("field1", "value1")) + searchResponse = prepareSearch("text_index").setQuery(termQuery("field1", "value1")) .addStoredField("field1") .addStoredField("field2") .execute() @@ -512,16 +510,16 @@ public void testIndexTemplateWithAliases() throws Exception { refresh(); - assertHitCount(client().prepareSearch("test_index"), 5L); - assertHitCount(client().prepareSearch("simple_alias"), 5L); - assertHitCount(client().prepareSearch("templated_alias-test_index"), 5L); + assertHitCount(prepareSearch("test_index"), 5L); + assertHitCount(prepareSearch("simple_alias"), 5L); + assertHitCount(prepareSearch("templated_alias-test_index"), 5L); - SearchResponse searchResponse = client().prepareSearch("filtered_alias").get(); + SearchResponse searchResponse = prepareSearch("filtered_alias").get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("type"), equalTo("type2")); // Search the complex filter alias - searchResponse = client().prepareSearch("complex_filtered_alias").get(); + searchResponse = prepareSearch("complex_filtered_alias").get(); assertHitCount(searchResponse, 3L); Set types = new HashSet<>(); @@ -558,9 +556,9 @@ public void testIndexTemplateWithAliasesInSource() { client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); - assertHitCount(client().prepareSearch("test_index"), 2L); + assertHitCount(prepareSearch("test_index"), 2L); - SearchResponse searchResponse = client().prepareSearch("my_alias").get(); + SearchResponse searchResponse = prepareSearch("my_alias").get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value2")); } @@ -593,10 +591,10 @@ public void testIndexTemplateWithAliasesSource() { client().prepareIndex("test_index").setId("2").setSource("field", "value2").get(); refresh(); - assertHitCount(client().prepareSearch("test_index"), 2L); - assertHitCount(client().prepareSearch("alias1"), 2L); + assertHitCount(prepareSearch("test_index"), 2L); + assertHitCount(prepareSearch("alias1"), 2L); - SearchResponse searchResponse = client().prepareSearch("alias2").get(); + SearchResponse searchResponse = prepareSearch("alias2").get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value2")); } @@ -813,7 +811,6 @@ public void testOrderAndVersion() { .setVersion(version) .setOrder(order) .setMapping("field", "type=text") - .get() ); GetIndexTemplatesResponse response = indicesAdmin().prepareGetTemplates("versioned_template").get(); @@ -852,8 +849,7 @@ public void testMultipleTemplate() throws IOException { ensureGreen(); // ax -> matches template - SearchResponse searchResponse = client().prepareSearch("ax") - .setQuery(termQuery("field1", "value1")) + SearchResponse 
searchResponse = prepareSearch("ax").setQuery(termQuery("field1", "value1")) .addStoredField("field1") .addStoredField("field2") .execute() @@ -864,8 +860,7 @@ public void testMultipleTemplate() throws IOException { assertNull(searchResponse.getHits().getAt(0).field("field2")); // bx -> matches template - searchResponse = client().prepareSearch("bx") - .setQuery(termQuery("field1", "value1")) + searchResponse = prepareSearch("bx").setQuery(termQuery("field1", "value1")) .addStoredField("field1") .addStoredField("field2") .execute() @@ -917,7 +912,6 @@ public void testPartitionedTemplate() throws Exception { indicesAdmin().preparePutTemplate("just_partitions") .setPatterns(Collections.singletonList("te*")) .setSettings(Settings.builder().put("index.routing_partition_size", "6")) - .get() ); // create an index with too few shards diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java index f8d2b0e464b72..fa4d4c0fbb669 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestAsyncProcessorIT.java @@ -13,32 +13,18 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentType; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.function.BiConsumer; -import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; @@ -85,23 +71,8 @@ public static class TestPlugin extends Plugin implements IngestPlugin { private ThreadPool threadPool; @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - this.threadPool = threadPool; + public Collection createComponents(PluginServices services) { + this.threadPool = services.threadPool(); return 
List.of(); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/FullRollingRestartIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/FullRollingRestartIT.java index fa8b782ff9305..f4aa261b09625 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/FullRollingRestartIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/FullRollingRestartIT.java @@ -95,7 +95,7 @@ public void testFullRollingRestart() throws Exception { logger.info("--> refreshing and checking data"); refresh(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); } // now start shutting nodes down @@ -124,7 +124,7 @@ public void testFullRollingRestart() throws Exception { logger.info("--> stopped two nodes, verifying data"); refresh(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); } // closing the 3rd node @@ -154,7 +154,7 @@ public void testFullRollingRestart() throws Exception { logger.info("--> one node left, verifying data"); refresh(); for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); + assertHitCount(prepareSearch().setSize(0).setQuery(matchAllQuery()), 2000L); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index a05d9bffa48fd..bfd16adaa405b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -319,8 +319,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, SearchResponse[] iterationResults = new SearchResponse[iterations]; boolean error = false; for (int i = 0; i < iterations; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setSize((int) numberOfDocs) + SearchResponse searchResponse = prepareSearch().setSize((int) numberOfDocs) .setQuery(matchAllQuery()) .setTrackTotalHits(true) .addSort("id", SortOrder.ASC) @@ -370,11 +369,7 @@ private void iterateAssertCount(final int numberOfShards, final int iterations, assertBusy(() -> { boolean errorOccurred = false; for (int i = 0; i < iterations; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setTrackTotalHits(true) - .setSize(0) - .setQuery(matchAllQuery()) - .get(); + SearchResponse searchResponse = prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()).get(); if (searchResponse.getHits().getTotalHits().value != numberOfDocs) { errorOccurred = true; } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index a18015da0737a..9e04413bfb014 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -27,6 +27,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import 
org.elasticsearch.common.Priority; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -79,7 +80,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.in; @@ -132,7 +133,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().execute().actionGet(); - assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); + assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); logger.info("--> start another node"); final String node_2 = internalCluster().startNode(); @@ -156,7 +157,7 @@ public void testSimpleRelocationNoIndexing() { logger.info("--> verifying count again..."); indicesAdmin().prepareRefresh().execute().actionGet(); - assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); + assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); } public void testRelocationWhileIndexingRandom() throws Exception { @@ -235,8 +236,7 @@ public void testRelocationWhileIndexingRandom() throws Exception { boolean ranOnce = false; for (int i = 0; i < 10; i++) { logger.info("--> START search test round {}", i + 1); - SearchHits hits = client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchHits hits = prepareSearch("test").setQuery(matchAllQuery()) .setSize((int) indexer.totalIndexedDocs()) .storedFields() .execute() @@ -404,7 +404,7 @@ public void testCancellationCleansTempFiles() throws Exception { logger.info("--> blocking recoveries from primary (allowed failures: [{}])", allowedFailures); CountDownLatch corruptionCount = new CountDownLatch(allowedFailures); ClusterService clusterService = internalCluster().getInstance(ClusterService.class, p_node); - MockTransportService mockTransportService = (MockTransportService) internalCluster().getInstance(TransportService.class, p_node); + final var mockTransportService = MockTransportService.getInstance(p_node); for (DiscoveryNode node : clusterService.state().nodes()) { if (node.equals(clusterService.localNode()) == false) { mockTransportService.addSendBehavior( @@ -485,7 +485,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { for (int i = 0; i < searchThreads.length; i++) { searchThreads[i] = new Thread(() -> { while (stopped.get() == false) { - assertNoFailures(client().prepareSearch("test").setRequestCache(false)); + assertNoFailures(prepareSearch("test").setRequestCache(false)); } }); searchThreads[i].start(); @@ -500,7 +500,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { docs[i] = client().prepareIndex("test").setId(id).setSource("field1", English.intToEnglish(i)); } indexRandom(true, docs); - assertHitCount(client().prepareSearch("test"), numDocs); + 
assertHitCount(prepareSearch("test"), numDocs); logger.info(" --> moving index to new nodes"); updateIndexSettings( @@ -522,9 +522,7 @@ public void testIndexSearchAndRelocateConcurrently() throws Exception { final int numIters = randomIntBetween(10, 20); for (int i = 0; i < numIters; i++) { logger.info(" --> checking iteration {}", i); - SearchResponse afterRelocation = client().prepareSearch().setSize(ids.size()).get(); - assertNoFailures(afterRelocation); - assertSearchHits(afterRelocation, ids.toArray(new String[ids.size()])); + assertSearchHitsWithoutFailures(prepareSearch().setSize(ids.size()), ids.toArray(Strings.EMPTY_ARRAY)); } stopped.set(true); for (Thread searchThread : searchThreads) { @@ -581,7 +579,7 @@ public void testRelocateWhileWaitingForRefresh() { logger.info("--> verifying count"); indicesAdmin().prepareRefresh().execute().actionGet(); - assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); + assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(20L)); } public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { @@ -651,7 +649,7 @@ public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws E assertTrue(pendingIndexResponses.stream().allMatch(ActionFuture::isDone)); }, 1, TimeUnit.MINUTES); - assertThat(client().prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L)); + assertThat(prepareSearch("test").setSize(0).execute().actionGet().getHits().getTotalHits().value, equalTo(120L)); } public void testRelocationEstablishedPeerRecoveryRetentionLeases() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java index ba845d3525df6..6281df7fc6646 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/TruncatedRecoveryIT.java @@ -93,7 +93,7 @@ public void testCancelRecoveryAndResume() throws Exception { indexRandom(true, builder); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - assertHitCount(client().prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1); + assertHitCount(prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1); } ensureGreen(); // ensure we have flushed segments and make them a big one via optimize @@ -104,24 +104,21 @@ public void testCancelRecoveryAndResume() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final AtomicBoolean truncate = new AtomicBoolean(true); for (NodeStats dataNode : dataNodeStats) { - MockTransportService mockTransportService = ((MockTransportService) internalCluster().getInstance( - TransportService.class, - dataNode.getNode().getName() - )); - mockTransportService.addSendBehavior( - internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), - (connection, requestId, action, request, options) -> { - if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { - RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; - logger.info("file chunk [{}] lastChunk: {}", req, req.lastChunk()); - if ((req.name().endsWith("cfs") || req.name().endsWith("fdt")) && req.lastChunk() && truncate.get()) { - latch.countDown(); - throw new RuntimeException("Caused 
some truncated files for fun and profit"); + MockTransportService.getInstance(dataNode.getNode().getName()) + .addSendBehavior( + internalCluster().getInstance(TransportService.class, unluckyNode.getNode().getName()), + (connection, requestId, action, request, options) -> { + if (action.equals(PeerRecoveryTargetService.Actions.FILE_CHUNK)) { + RecoveryFileChunkRequest req = (RecoveryFileChunkRequest) request; + logger.info("file chunk [{}] lastChunk: {}", req, req.lastChunk()); + if ((req.name().endsWith("cfs") || req.name().endsWith("fdt")) && req.lastChunk() && truncate.get()) { + latch.countDown(); + throw new RuntimeException("Caused some truncated files for fun and profit"); + } } + connection.sendRequest(requestId, action, request, options); } - connection.sendRequest(requestId, action, request, options); - } - ); + ); } logger.info("--> bumping replicas to 1"); // @@ -143,7 +140,7 @@ public void testCancelRecoveryAndResume() throws Exception { ensureGreen("test"); for (int i = 0; i < numDocs; i++) { String id = Integer.toString(i); - assertHitCount(client().prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1); + assertHitCount(prepareSearch().setQuery(QueryBuilders.termQuery("the_id", id)), 1); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java index b77428c6f6396..be8053a1d6866 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/IndexSnapshotsServiceIT.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; @@ -113,7 +114,7 @@ public void testGetShardSnapshotReturnsTheLatestSuccessfulSnapshot() throws Exce final boolean useBwCFormat = randomBoolean(); if (useBwCFormat) { - final IndexVersion version = randomVersionBetween(random(), IndexVersion.V_7_5_0, IndexVersion.current()); + final IndexVersion version = randomVersionBetween(random(), IndexVersions.V_7_5_0, IndexVersion.current()); initWithSnapshotVersion(repoName, repoPath, version); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/repositories/RepositoriesServiceIT.java index a5bb89670389c..76f3ca328d222 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/repositories/RepositoriesServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/repositories/RepositoriesServiceIT.java @@ -46,9 +46,7 @@ public void testUpdateRepository() { final Settings.Builder repoSettings = Settings.builder().put("location", randomRepoPath()); - assertAcked( - client.admin().cluster().preparePutRepository(repositoryName).setType(FsRepository.TYPE).setSettings(repoSettings).get() - ); + assertAcked(client.admin().cluster().preparePutRepository(repositoryName).setType(FsRepository.TYPE).setSettings(repoSettings)); final GetRepositoriesResponse originalGetRepositoriesResponse = client.admin() .cluster() @@ -66,9 +64,7 @@ public void testUpdateRepository() { final boolean updated = randomBoolean(); final 
String updatedRepositoryType = updated ? "mock" : FsRepository.TYPE; - assertAcked( - client.admin().cluster().preparePutRepository(repositoryName).setType(updatedRepositoryType).setSettings(repoSettings).get() - ); + assertAcked(client.admin().cluster().preparePutRepository(repositoryName).setType(updatedRepositoryType).setSettings(repoSettings)); final GetRepositoriesResponse updatedGetRepositoriesResponse = client.admin() .cluster() @@ -86,8 +82,6 @@ public void testUpdateRepository() { // check that a noop update does not verify. Since the new data node does not share the same `path.repo`, verification will fail if // it runs. internalCluster().startDataOnlyNode(Settings.builder().put(Environment.PATH_REPO_SETTING.getKey(), createTempDir()).build()); - assertAcked( - client.admin().cluster().preparePutRepository(repositoryName).setType(updatedRepositoryType).setSettings(repoSettings).get() - ); + assertAcked(client.admin().cluster().preparePutRepository(repositoryName).setType(updatedRepositoryType).setSettings(repoSettings)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasResolveRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasResolveRoutingIT.java index 70c306a08d600..478cae8746f86 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasResolveRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasResolveRoutingIT.java @@ -43,10 +43,7 @@ public void testSearchClosedWildcardIndex() throws ExecutionException, Interrupt ); refresh("test-*"); assertHitCount( - client().prepareSearch() - .setIndices("alias-*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setQuery(queryStringQuery("quick")), + prepareSearch().setIndices("alias-*").setIndicesOptions(IndicesOptions.lenientExpandOpen()).setQuery(queryStringQuery("quick")), 3L ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index f6abb5939e54b..7ee081ffd433e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -121,7 +121,7 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine one"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, + prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L) ); } @@ -129,8 +129,7 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with wrong routing, should not find"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch() - .setRouting("1") + prepareSearch().setRouting("1") .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -140,8 +139,7 @@ public void testAliasSearchRouting() throws Exception { ); assertThat( - client().prepareSearch() - .setSize(0) + prepareSearch().setSize(0) .setRouting("1") .setQuery(QueryBuilders.matchAllQuery()) .execute() @@ -152,18 +150,12 @@ public void testAliasSearchRouting() throws Exception { ); assertThat( - client().prepareSearch("alias1") - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + 
prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(0L) ); assertThat( - client().prepareSearch("alias1") - .setSize(0) + prepareSearch("alias1").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -177,8 +169,7 @@ public void testAliasSearchRouting() throws Exception { for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch() - .setRouting("0") + prepareSearch().setRouting("0") .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -187,8 +178,7 @@ public void testAliasSearchRouting() throws Exception { equalTo(1L) ); assertThat( - client().prepareSearch() - .setSize(0) + prepareSearch().setSize(0) .setRouting("0") .setQuery(QueryBuilders.matchAllQuery()) .execute() @@ -198,17 +188,11 @@ public void testAliasSearchRouting() throws Exception { equalTo(1L) ); assertThat( - client().prepareSearch("alias0") - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L) ); assertThat( - client().prepareSearch("alias0") - .setSize(0) + prepareSearch("alias0").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -224,17 +208,11 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with no routing, should fine two"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, + prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L) ); assertThat( - client().prepareSearch() - .setSize(0) - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L) ); } @@ -242,8 +220,7 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with 0 routing, should find one"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch() - .setRouting("0") + prepareSearch().setRouting("0") .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -252,8 +229,7 @@ public void testAliasSearchRouting() throws Exception { equalTo(1L) ); assertThat( - client().prepareSearch() - .setSize(0) + prepareSearch().setSize(0) .setRouting("0") .setQuery(QueryBuilders.matchAllQuery()) .execute() @@ -263,17 +239,11 @@ public void testAliasSearchRouting() throws Exception { equalTo(1L) ); assertThat( - client().prepareSearch("alias0") - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch("alias0").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(1L) ); assertThat( - client().prepareSearch("alias0") - .setSize(0) + prepareSearch("alias0").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -286,8 +256,7 @@ public void testAliasSearchRouting() throws Exception { logger.info("--> search with 1 routing, should find one"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch() - .setRouting("1") + prepareSearch().setRouting("1") .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -296,8 +265,7 @@ public void 
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting("1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -307,17 +275,11 @@ public void testAliasSearchRouting() throws Exception {
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch("alias1")
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet()
-                    .getHits()
-                    .getTotalHits().value,
+                prepareSearch("alias1").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch("alias1")
-                    .setSize(0)
+                prepareSearch("alias1").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -330,8 +292,7 @@ public void testAliasSearchRouting() throws Exception {
         logger.info("--> search with 0,1 indexRoutings , should find two");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting("0", "1")
+                prepareSearch().setRouting("0", "1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -340,8 +301,7 @@ public void testAliasSearchRouting() throws Exception {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting("0", "1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -351,17 +311,11 @@ public void testAliasSearchRouting() throws Exception {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch("alias01")
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet()
-                    .getHits()
-                    .getTotalHits().value,
+                prepareSearch("alias01").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch("alias01")
-                    .setSize(0)
+                prepareSearch("alias01").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -374,8 +328,7 @@ public void testAliasSearchRouting() throws Exception {
         logger.info("--> search with two routing aliases , should find two");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch("alias0", "alias1")
-                    .setQuery(QueryBuilders.matchAllQuery())
+                prepareSearch("alias0", "alias1").setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
                     .getHits()
@@ -383,8 +336,7 @@ public void testAliasSearchRouting() throws Exception {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch("alias0", "alias1")
-                    .setSize(0)
+                prepareSearch("alias0", "alias1").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -397,8 +349,7 @@ public void testAliasSearchRouting() throws Exception {
         logger.info("--> search with alias0, alias1 and alias01, should find two");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch("alias0", "alias1", "alias01")
-                    .setQuery(QueryBuilders.matchAllQuery())
+                prepareSearch("alias0", "alias1", "alias01").setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
                     .getHits()
@@ -406,8 +357,7 @@ public void testAliasSearchRouting() throws Exception {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch("alias0", "alias1", "alias01")
-                    .setSize(0)
+                prepareSearch("alias0", "alias1", "alias01").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -420,8 +370,7 @@ public void testAliasSearchRouting() throws Exception {
         logger.info("--> search with test, alias0 and alias1, should find two");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch("test", "alias0", "alias1")
-                    .setQuery(QueryBuilders.matchAllQuery())
+ prepareSearch("test", "alias0", "alias1").setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() .getHits() @@ -429,8 +378,7 @@ public void testAliasSearchRouting() throws Exception { equalTo(2L) ); assertThat( - client().prepareSearch("test", "alias0", "alias1") - .setSize(0) + prepareSearch("test", "alias0", "alias1").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -488,8 +436,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a1,alias-b0, should not find"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch("alias-a1", "alias-b0") - .setQuery(QueryBuilders.matchAllQuery()) + prepareSearch("alias-a1", "alias-b0").setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() .getHits() @@ -497,8 +444,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { equalTo(0L) ); assertThat( - client().prepareSearch("alias-a1", "alias-b0") - .setSize(0) + prepareSearch("alias-a1", "alias-b0").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -511,17 +457,11 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-ab, should find two"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch("alias-ab") - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch("alias-ab").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L) ); assertThat( - client().prepareSearch("alias-ab") - .setSize(0) + prepareSearch("alias-ab").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -534,8 +474,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { logger.info("--> search with alias-a0,alias-b1 should find two"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch("alias-a0", "alias-b1") - .setQuery(QueryBuilders.matchAllQuery()) + prepareSearch("alias-a0", "alias-b1").setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() .getHits() @@ -543,8 +482,7 @@ public void testAliasSearchRoutingWithTwoIndices() throws Exception { equalTo(2L) ); assertThat( - client().prepareSearch("alias-a0", "alias-b1") - .setSize(0) + prepareSearch("alias-a0", "alias-b1").setSize(0) .setQuery(QueryBuilders.matchAllQuery()) .execute() .actionGet() @@ -574,12 +512,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue2682() thro logger.info("--> search all on index_* should find two"); for (int i = 0; i < 5; i++) { assertThat( - client().prepareSearch("index_*") - .setQuery(QueryBuilders.matchAllQuery()) - .execute() - .actionGet() - .getHits() - .getTotalHits().value, + prepareSearch("index_*").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value, equalTo(2L) ); } @@ -602,8 +535,7 @@ public void testAliasSearchRoutingWithConcreteAndAliasedIndices_issue3268() thro logger.info("--> indexing on index_2 which is a concrete index"); client().prepareIndex("index_2").setId("2").setSource("field", "value2").setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - SearchResponse searchResponse = client().prepareSearch("index_*") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(1) .setQuery(QueryBuilders.matchAllQuery()) .execute() @@ -630,17 +562,11 @@ public void 
         for (int i = 0; i < 5; i++) {
             assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
             assertThat(
-                client().prepareSearch("alias")
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet()
-                    .getHits()
-                    .getTotalHits().value,
+                prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch("alias")
-                    .setSize(0)
+                prepareSearch("alias").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -656,17 +582,11 @@ public void testIndexingAliasesOverTime() throws Exception {
         logger.info("--> verifying search with wrong routing should not find");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch("alias")
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet()
-                    .getHits()
-                    .getTotalHits().value,
+                prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(0L)
             );
             assertThat(
-                client().prepareSearch("alias")
-                    .setSize(0)
+                prepareSearch("alias").setSize(0)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -691,17 +611,11 @@ public void testIndexingAliasesOverTime() throws Exception {
         assertThat(client().prepareGet("test", "0").setRouting("3").execute().actionGet().isExists(), equalTo(true));
         assertThat(client().prepareGet("test", "1").setRouting("4").execute().actionGet().isExists(), equalTo(true));
         assertThat(
-            client().prepareSearch("alias")
-                .setQuery(QueryBuilders.matchAllQuery())
-                .execute()
-                .actionGet()
-                .getHits()
-                .getTotalHits().value,
+            prepareSearch("alias").setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
             equalTo(2L)
         );
         assertThat(
-            client().prepareSearch("alias")
-                .setSize(0)
+            prepareSearch("alias").setSize(0)
                 .setQuery(QueryBuilders.matchAllQuery())
                 .execute()
                 .actionGet()
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java
index 72729172d6c52..4b685ca2699be 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java
@@ -145,8 +145,7 @@ private void verifyRoutedSearches(String index, Map> routing
             String routing = routingEntry.getKey();
             int expectedDocuments = routingEntry.getValue().size();
-            SearchResponse response = client().prepareSearch()
-                .setQuery(QueryBuilders.termQuery("_routing", routing))
+            SearchResponse response = prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing))
                 .setRouting(routing)
                 .setIndices(index)
                 .setSize(100)
@@ -183,8 +182,7 @@ private void verifyBroadSearches(String index, Map> routingT
             String routing = routingEntry.getKey();
             int expectedDocuments = routingEntry.getValue().size();
-            SearchResponse response = client().prepareSearch()
-                .setQuery(QueryBuilders.termQuery("_routing", routing))
+            SearchResponse response = prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing))
                 .setIndices(index)
                 .setSize(100)
                 .execute()
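The routing-test hunks above all apply one mechanical change: client().prepareSearch(...) becomes a bare prepareSearch(...). A minimal sketch of the kind of forwarding helper this presumes on the shared test base class — the helper name matches the call sites in the hunks, but the class, package, and import paths below are assumptions, not taken from this diff:

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.client.internal.Client;

    // Sketch only, not the actual framework code: a base-class convenience so
    // integration tests can write prepareSearch("idx") instead of
    // client().prepareSearch("idx").
    public abstract class PrepareSearchHelperSketch {

        protected abstract Client client(); // supplied by the real test base class

        protected SearchRequestBuilder prepareSearch(String... indices) {
            return client().prepareSearch(indices); // same builder, one hop shorter
        }
    }

Since the returned builder is unchanged, every chained call (setRouting, setQuery, setSize, ...) reads exactly as before; the refactor only removes the repeated client() prefix.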
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java
index f1417d2f230ad..93b1ac68be6a5 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/SimpleRoutingIT.java
@@ -138,7 +138,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with no routing, should fine one");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
+                prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(1L)
             );
         }
@@ -146,8 +146,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with wrong routing, should not find");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting("1")
+                prepareSearch().setRouting("1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -156,8 +155,7 @@ public void testSimpleSearchRouting() {
                 equalTo(0L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting("1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -171,8 +169,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with correct routing, should find");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting(routingValue)
+                prepareSearch().setRouting(routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -181,8 +178,7 @@ public void testSimpleSearchRouting() {
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting(routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -205,17 +201,11 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with no routing, should fine two");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
+                prepareSearch().setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
-                    .setQuery(QueryBuilders.matchAllQuery())
-                    .execute()
-                    .actionGet()
-                    .getHits()
-                    .getTotalHits().value,
+                prepareSearch().setSize(0).setQuery(QueryBuilders.matchAllQuery()).execute().actionGet().getHits().getTotalHits().value,
                 equalTo(2L)
             );
         }
@@ -223,8 +213,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with {} routing, should find one", routingValue);
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting(routingValue)
+                prepareSearch().setRouting(routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -233,8 +222,7 @@ public void testSimpleSearchRouting() {
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting(routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -248,8 +236,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with {} routing, should find one", secondRoutingValue);
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting("1")
+                prepareSearch().setRouting("1")
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -258,8 +245,7 @@ public void testSimpleSearchRouting() {
                 equalTo(1L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting(secondRoutingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -273,8 +259,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with {},{} indexRoutings , should find two", routingValue, "1");
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting(routingValue, secondRoutingValue)
+                prepareSearch().setRouting(routingValue, secondRoutingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -283,8 +268,7 @@ public void testSimpleSearchRouting() {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting(routingValue, secondRoutingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
@@ -298,8 +282,7 @@ public void testSimpleSearchRouting() {
         logger.info("--> search with {},{},{} indexRoutings , should find two", routingValue, secondRoutingValue, routingValue);
         for (int i = 0; i < 5; i++) {
             assertThat(
-                client().prepareSearch()
-                    .setRouting(routingValue, secondRoutingValue, routingValue)
+                prepareSearch().setRouting(routingValue, secondRoutingValue, routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
                     .actionGet()
@@ -308,8 +291,7 @@ public void testSimpleSearchRouting() {
                 equalTo(2L)
             );
             assertThat(
-                client().prepareSearch()
-                    .setSize(0)
+                prepareSearch().setSize(0)
                     .setRouting(routingValue, secondRoutingValue, routingValue)
                     .setQuery(QueryBuilders.matchAllQuery())
                     .execute()
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java
index 233c492e54b7e..05fa9dc66928c 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java
@@ -65,9 +65,9 @@ public void testCancellationDuringQueryPhase() throws Exception {
         indexTestData();
         logger.info("Executing search");
-        ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
-            .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())))
-            .execute();
+        ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(
+            scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))
+        ).execute();
         awaitForBlock(plugins);
         cancelSearch(SearchAction.NAME);
@@ -82,9 +82,10 @@ public void testCancellationDuringFetchPhase() throws Exception {
         indexTestData();
         logger.info("Executing search");
-        ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
-            .addScriptField("test_field", new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))
-            .execute();
+        ActionFuture<SearchResponse> searchResponse = prepareSearch("test").addScriptField(
+            "test_field",
+            new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap())
+        ).execute();
         awaitForBlock(plugins);
         cancelSearch(SearchAction.NAME);
@@ -110,8 +111,7 @@ public void testCancellationDuringAggregation() throws Exception {
             termsAggregationBuilder.field("field.keyword");
         }
-        ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        ActionFuture<SearchResponse> searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addAggregation(
                 termsAggregationBuilder.subAggregation(
                     new ScriptedMetricAggregationBuilder("sub_agg").initScript(
@@ -144,8 +144,7 @@ public void testCancellationOfScrollSearches() throws Exception {
         indexTestData();
         logger.info("Executing search");
-        ActionFuture<SearchResponse> searchResponse = client().prepareSearch("test")
client().prepareSearch("test") - .setScroll(TimeValue.timeValueSeconds(10)) + ActionFuture searchResponse = prepareSearch("test").setScroll(TimeValue.timeValueSeconds(10)) .setSize(5) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .execute(); @@ -171,8 +170,7 @@ public void testCancellationOfScrollSearchesOnFollowupRequests() throws Exceptio logger.info("Executing search"); TimeValue keepAlive = TimeValue.timeValueSeconds(5); - SearchResponse searchResponse = client().prepareSearch("test") - .setScroll(keepAlive) + SearchResponse searchResponse = prepareSearch("test").setScroll(keepAlive) .setSize(2) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .get(); @@ -209,11 +207,10 @@ public void testCancelMultiSearch() throws Exception { indexTestData(); ActionFuture msearchResponse = client().prepareMultiSearch() .add( - client().prepareSearch("test") - .addScriptField( - "test_field", - new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()) - ) + prepareSearch("test").addScriptField( + "test_field", + new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()) + ) ) .execute(); awaitForBlock(plugins); @@ -244,8 +241,7 @@ public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception Thread searchThread = new Thread(() -> { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + () -> prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SEARCH_BLOCK_SCRIPT_NAME, Collections.emptyMap()))) .setAllowPartialSearchResults(false) .setSize(1000) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java index 732f61ad9d151..000dccdee34c6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchServiceCleanupOnLostMasterIT.java @@ -16,7 +16,6 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.hamcrest.Matchers; import java.util.Collection; @@ -48,11 +47,8 @@ public void testMasterRestart() throws Exception { public void testDroppedOutNode() throws Exception { testLostMaster((master, dataNode) -> { - final MockTransportService masterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - master - ); - final TransportService dataTransportService = internalCluster().getInstance(TransportService.class, dataNode); + final var masterTransportService = MockTransportService.getInstance(master); + final var dataTransportService = MockTransportService.getInstance(dataNode); masterTransportService.addFailToSendNoConnectRule(dataTransportService, FollowersChecker.FOLLOWER_CHECK_ACTION_NAME); assertBusy(() -> { @@ -73,7 +69,7 @@ private void testLostMaster(CheckedBiConsumer loseMas index("test", "test", "{}"); - assertThat(client().prepareSearch("test").setScroll("30m").get().getScrollId(), 
+        assertThat(prepareSearch("test").setScroll("30m").get().getScrollId(), is(notNullValue()));
         loseMaster.accept(master, dataNode);
         // in the past, this failed because the search context for the scroll would prevent the shard lock from being released.
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java
index 008e164dd265d..1bcf2d8fb327f 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java
@@ -55,8 +55,7 @@ private void indexDocs() {
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98369")
     public void testTopHitsTimeout() {
         indexDocs();
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
+        SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
             .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
             .get();
         assertThat(searchResponse.isTimedOut(), equalTo(true));
@@ -71,8 +70,7 @@ public void testTopHitsTimeout() {
     @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98053")
     public void testAggsTimeout() {
         indexDocs();
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
+        SearchResponse searchResponse = prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
             .setSize(0)
             .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
             .addAggregation(new TermsAggregationBuilder("terms").field("field.keyword"))
@@ -96,8 +94,7 @@ public void testPartialResultsIntolerantTimeout() throws Exception {
         ElasticsearchException ex = expectThrows(
             ElasticsearchException.class,
-            () -> client().prepareSearch("test")
-                .setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
+            () -> prepareSearch("test").setTimeout(new TimeValue(10, TimeUnit.MILLISECONDS))
                 .setQuery(scriptQuery(new Script(ScriptType.INLINE, "mockscript", SCRIPT_NAME, Collections.emptyMap())))
                 .setAllowPartialSearchResults(false) // this line causes timeouts to report failures
                 .get()
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java
index b452b4da6e2d4..3202037c8486f 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchWithRejectionsIT.java
@@ -48,7 +48,7 @@ public void testOpenContextsAfterRejections() throws Exception {
         SearchType searchType = randomFrom(SearchType.DEFAULT, SearchType.QUERY_THEN_FETCH, SearchType.DFS_QUERY_THEN_FETCH);
         logger.info("search type is {}", searchType);
         for (int i = 0; i < numSearches; i++) {
-            responses[i] = client().prepareSearch().setQuery(matchAllQuery()).setSearchType(searchType).execute();
+            responses[i] = prepareSearch().setQuery(matchAllQuery()).setSearchType(searchType).execute();
         }
         for (int i = 0; i < numSearches; i++) {
             try {
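The SearchServiceCleanupOnLostMasterIT hunk a few files above likewise folds a lookup-and-cast into MockTransportService.getInstance(node). A hedged sketch of what such a static convenience could look like, assuming it resolves the node's TransportService from the internal test cluster — the signature is inferred from the call sites, the body is an assumption:

    import org.elasticsearch.test.ESIntegTestCase;
    import org.elasticsearch.test.transport.MockTransportService;
    import org.elasticsearch.transport.TransportService;

    // Sketch only: bundles the getInstance(TransportService.class, node) lookup
    // and the MockTransportService cast that the old test code spelled out.
    public final class MockTransportLookupSketch {

        private MockTransportLookupSketch() {}

        public static MockTransportService getInstance(String nodeName) {
            return (MockTransportService) ESIntegTestCase.internalCluster().getInstance(TransportService.class, nodeName);
        }
    }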
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
index d09fb6e32e40f..eec815d6957aa 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/StressSearchServiceReaperIT.java
@@ -9,7 +9,6 @@ import org.apache.lucene.tests.util.English;
 import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -19,8 +18,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
 import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
 @ClusterScope(scope = SUITE)
 public class StressSearchServiceReaperIT extends ESIntegTestCase {
@@ -45,9 +43,7 @@ public void testStressReaper() throws ExecutionException, InterruptedException {
         indexRandom(true, builders);
         final int iterations = scaledRandomIntBetween(500, 1000);
         for (int i = 0; i < iterations; i++) {
-            SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).setSize(num).get();
-            assertNoFailures(searchResponse);
-            assertHitCount(searchResponse, num);
+            assertHitCountAndNoFailures(prepareSearch("test").setQuery(matchAllQuery()).setSize(num), num);
         }
     }
 }
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java
index d6b9cb0ac267c..cc74dcc3d0d28 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/AggregationsIntegrationIT.java
@@ -19,7 +19,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 @ESIntegTestCase.SuiteScopeTestCase
 public class AggregationsIntegrationIT extends ESIntegTestCase {
@@ -39,12 +39,11 @@ public void setupSuiteScopeCluster() throws Exception {
     public void testScroll() {
         final int size = randomIntBetween(1, 4);
-        SearchResponse response = client().prepareSearch("index")
-            .setSize(size)
+        SearchResponse response = prepareSearch("index").setSize(size)
             .setScroll(TimeValue.timeValueMinutes(1))
             .addAggregation(terms("f").field("f"))
             .get();
-        assertSearchResponse(response);
+        assertNoFailures(response);
         Aggregations aggregations = response.getAggregations();
         assertNotNull(aggregations);
         Terms terms = aggregations.get("f");
@@ -53,7 +52,7 @@ public void testScroll() {
         int total = response.getHits().getHits().length;
         while (response.getHits().getHits().length > 0) {
             response = client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get();
-            assertSearchResponse(response);
+            assertNoFailures(response);
             assertNull(response.getAggregations());
             total += response.getHits().getHits().length;
         }
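From AggregationsIntegrationIT onward the hunks also retire assertSearchResponse in favour of assertNoFailures, and fold separate no-failure and hit-count checks into assertHitCountAndNoFailures. The helper names come from the hunks themselves; the surrounding scaffolding in this usage sketch is assumed:

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.test.ESIntegTestCase;

    import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures;
    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;

    // Sketch only: the one-for-one replacement pattern used throughout the hunks.
    public class AssertionSwapSketch extends ESIntegTestCase {

        public void testSketch() {
            SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()).get();
            assertNoFailures(response); // was: assertSearchResponse(response)

            // Combined form, as in the StressSearchServiceReaperIT hunk above.
            assertHitCountAndNoFailures(prepareSearch("idx").setQuery(matchAllQuery()), 1);
        }
    }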
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java
index 683ccb975b9fb..a0144d30a4728 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java
@@ -24,7 +24,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.missing;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
@@ -61,12 +61,11 @@ public void testMultipleAggsOnSameField_WithDifferentRequiredValueSourceType() t
         ensureSearchable();
         SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(missing("missing_values").field("value"))
+        SearchResponse response = prepareSearch("idx").addAggregation(missing("missing_values").field("value"))
             .addAggregation(terms("values").field("value").collectMode(aggCollectionMode))
             .get();
-        assertSearchResponse(response);
+        assertNoFailures(response);
         Aggregations aggs = response.getAggregations();
@@ -109,11 +108,9 @@ public void testSubAggregationForTopAggregationOnUnmappedField() throws Exceptio
         ensureSearchable("idx");
         SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
-        SearchResponse searchResponse = client().prepareSearch("idx")
-            .addAggregation(
-                histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode))
-            )
-            .get();
+        SearchResponse searchResponse = prepareSearch("idx").addAggregation(
+            histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode))
+        ).get();
         assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(0L));
         Histogram values = searchResponse.getAggregations().get("values");
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java
index 4352c0962c7df..ea896c73f8882 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java
@@ -57,7 +57,6 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.core.IsNull.notNullValue;
@@ -153,7 +152,7 @@ public void testRandomRanges() throws Exception {
             }
         }
-        SearchRequestBuilder reqBuilder = client().prepareSearch("idx").addAggregation(query);
client().prepareSearch("idx").addAggregation(query); + SearchRequestBuilder reqBuilder = prepareSearch("idx").addAggregation(query); for (int i = 0; i < ranges.length; ++i) { RangeQueryBuilder filter = QueryBuilders.rangeQuery("values"); if (ranges[i][0] != Double.NEGATIVE_INFINITY) { @@ -255,13 +254,12 @@ public void testDuelTerms() throws Exception { assertNoFailures(indicesAdmin().prepareRefresh("idx").setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get()); - SearchResponse resp = client().prepareSearch("idx") - .addAggregation( - terms("long").field("long_values") - .size(maxNumTerms) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(min("min").field("num")) - ) + SearchResponse resp = prepareSearch("idx").addAggregation( + terms("long").field("long_values") + .size(maxNumTerms) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(min("min").field("num")) + ) .addAggregation( terms("double").field("double_values") .size(maxNumTerms) @@ -357,17 +355,14 @@ public void testDuelTermsHistogram() throws Exception { Map params = new HashMap<>(); params.put("interval", interval); - SearchResponse resp = client().prepareSearch("idx") - .addAggregation( - terms("terms").field("values") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params)) - .size(maxNumTerms) - ) - .addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1)) - .get(); + SearchResponse resp = prepareSearch("idx").addAggregation( + terms("terms").field("values") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / interval)", params)) + .size(maxNumTerms) + ).addAggregation(histogram("histo").field("values").interval(interval).minDocCount(1)).get(); - assertSearchResponse(resp); + assertNoFailures(resp); Terms terms = resp.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -403,13 +398,11 @@ public void testLargeNumbersOfPercentileBuckets() throws Exception { } indexRandom(true, indexingRequests); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").field("double_value") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(percentiles("pcts").field("double_value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").field("double_value") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(percentiles("pcts").field("double_value")) + ).get(); assertAllSuccessful(response); assertEquals(numDocs, response.getHits().getTotalHits().value); } @@ -419,15 +412,13 @@ public void testReduce() throws Exception { createIndex("idx"); final int value = randomIntBetween(0, 10); indexRandom(true, client().prepareIndex("idx").setSource("f", value)); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filter("filter", QueryBuilders.matchAllQuery()).subAggregation( - range("range").field("f").addUnboundedTo(6).addUnboundedFrom(6).subAggregation(sum("sum").field("f")) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + filter("filter", QueryBuilders.matchAllQuery()).subAggregation( + range("range").field("f").addUnboundedTo(6).addUnboundedFrom(6).subAggregation(sum("sum").field("f")) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filter 
         assertNotNull(filter);
@@ -482,30 +473,26 @@ public void testDuelDepthBreadthFirst() throws Exception {
             }
         }
         indexRandom(true, reqs);
-        final SearchResponse r1 = client().prepareSearch("idx")
-            .addAggregation(
-                terms("f1").field("f1")
-                    .collectMode(SubAggCollectionMode.DEPTH_FIRST)
-                    .subAggregation(
-                        terms("f2").field("f2")
-                            .collectMode(SubAggCollectionMode.DEPTH_FIRST)
-                            .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST))
-                    )
-            )
-            .get();
-        assertSearchResponse(r1);
-        final SearchResponse r2 = client().prepareSearch("idx")
-            .addAggregation(
-                terms("f1").field("f1")
-                    .collectMode(SubAggCollectionMode.BREADTH_FIRST)
-                    .subAggregation(
-                        terms("f2").field("f2")
-                            .collectMode(SubAggCollectionMode.BREADTH_FIRST)
-                            .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST))
-                    )
-            )
-            .get();
-        assertSearchResponse(r2);
+        final SearchResponse r1 = prepareSearch("idx").addAggregation(
+            terms("f1").field("f1")
+                .collectMode(SubAggCollectionMode.DEPTH_FIRST)
+                .subAggregation(
+                    terms("f2").field("f2")
+                        .collectMode(SubAggCollectionMode.DEPTH_FIRST)
+                        .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.DEPTH_FIRST))
+                )
+        ).get();
+        assertNoFailures(r1);
+        final SearchResponse r2 = prepareSearch("idx").addAggregation(
+            terms("f1").field("f1")
+                .collectMode(SubAggCollectionMode.BREADTH_FIRST)
+                .subAggregation(
+                    terms("f2").field("f2")
+                        .collectMode(SubAggCollectionMode.BREADTH_FIRST)
+                        .subAggregation(terms("f3").field("f3").collectMode(SubAggCollectionMode.BREADTH_FIRST))
+                )
+        ).get();
+        assertNoFailures(r2);
         final Terms t1 = r1.getAggregations().get("f1");
         final Terms t2 = r2.getAggregations().get("f1");
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java
index f5283b979c722..b255a7b5f9bb6 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MetadataIT.java
@@ -22,7 +22,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.maxBucket;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 public class MetadataIT extends ESIntegTestCase {
@@ -39,14 +39,11 @@ public void testMetadataSetOnAggregationResult() throws Exception {
         final var nestedMetadata = Map.of("nested", "value");
         var metadata = Map.of("key", "value", "numeric", 1.2, "bool", true, "complex", nestedMetadata);
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value"))
-            )
-            .addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata))
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            terms("the_terms").setMetadata(metadata).field("name").subAggregation(sum("the_sum").setMetadata(metadata).field("value"))
+        ).addAggregation(maxBucket("the_max_bucket", "the_terms>the_sum").setMetadata(metadata)).get();
-        assertSearchResponse(response);
+        assertNoFailures(response);
         Aggregations aggs = response.getAggregations();
         assertNotNull(aggs);
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java
index 96636b57f2774..8b7f566750042 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/MissingValueIT.java
@@ -30,7 +30,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.closeTo;
 @ESIntegTestCase.SuiteScopeTestCase
@@ -54,10 +54,8 @@ protected void setupSuiteScopeCluster() throws Exception {
     }
     public void testUnmappedTerms() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(terms("my_terms").field("non_existing_field").missing("bar"))
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("non_existing_field").missing("bar")).get();
+        assertNoFailures(response);
         Terms terms = response.getAggregations().get("my_terms");
         assertEquals(1, terms.getBuckets().size());
         assertEquals(2, terms.getBucketByKey("bar").getDocCount());
@@ -65,17 +63,17 @@ public void testUnmappedTerms() {
     public void testStringTerms() {
         for (ExecutionMode mode : ExecutionMode.values()) {
-            SearchResponse response = client().prepareSearch("idx")
-                .addAggregation(terms("my_terms").field("str").executionHint(mode.toString()).missing("bar"))
-                .get();
-            assertSearchResponse(response);
+            SearchResponse response = prepareSearch("idx").addAggregation(
+                terms("my_terms").field("str").executionHint(mode.toString()).missing("bar")
+            ).get();
+            assertNoFailures(response);
             Terms terms = response.getAggregations().get("my_terms");
             assertEquals(2, terms.getBuckets().size());
             assertEquals(1, terms.getBucketByKey("foo").getDocCount());
             assertEquals(1, terms.getBucketByKey("bar").getDocCount());
-            response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get();
-            assertSearchResponse(response);
+            response = prepareSearch("idx").addAggregation(terms("my_terms").field("str").missing("foo")).get();
+            assertNoFailures(response);
             terms = response.getAggregations().get("my_terms");
             assertEquals(1, terms.getBuckets().size());
             assertEquals(2, terms.getBucketByKey("foo").getDocCount());
@@ -83,40 +81,40 @@ public void testStringTerms() {
     }
     public void testLongTerms() {
-        SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(4)).get();
+        assertNoFailures(response);
         Terms terms = response.getAggregations().get("my_terms");
         assertEquals(2, terms.getBuckets().size());
         assertEquals(1, terms.getBucketByKey("3").getDocCount());
         assertEquals(1, terms.getBucketByKey("4").getDocCount());
-        response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get();
client().prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get(); - assertSearchResponse(response); + response = prepareSearch("idx").addAggregation(terms("my_terms").field("long").missing(3)).get(); + assertNoFailures(response); terms = response.getAggregations().get("my_terms"); assertEquals(1, terms.getBuckets().size()); assertEquals(2, terms.getBucketByKey("3").getDocCount()); } public void testDoubleTerms() { - SearchResponse response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(4.5)).get(); + assertNoFailures(response); Terms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); assertEquals(1, terms.getBucketByKey("4.5").getDocCount()); assertEquals(1, terms.getBucketByKey("5.5").getDocCount()); - response = client().prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)).get(); - assertSearchResponse(response); + response = prepareSearch("idx").addAggregation(terms("my_terms").field("double").missing(5.5)).get(); + assertNoFailures(response); terms = response.getAggregations().get("my_terms"); assertEquals(1, terms.getBuckets().size()); assertEquals(2, terms.getBucketByKey("5.5").getDocCount()); } public void testUnmappedHistogram() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("my_histogram").field("non-existing_field").interval(5).missing(12)) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("my_histogram").field("non-existing_field").interval(5).missing(12) + ).get(); + assertNoFailures(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); assertEquals(10d, histogram.getBuckets().get(0).getKey()); @@ -124,10 +122,8 @@ public void testUnmappedHistogram() { } public void testHistogram() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(7)).get(); + assertNoFailures(response); Histogram histogram = response.getAggregations().get("my_histogram"); assertEquals(2, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); @@ -135,8 +131,8 @@ public void testHistogram() { assertEquals(5d, histogram.getBuckets().get(1).getKey()); assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - response = client().prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get(); - assertSearchResponse(response); + response = prepareSearch("idx").addAggregation(histogram("my_histogram").field("long").interval(5).missing(3)).get(); + assertNoFailures(response); histogram = response.getAggregations().get("my_histogram"); assertEquals(1, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); @@ -144,10 +140,10 @@ public void testHistogram() { } public void testDateHistogram() { - SearchResponse response = client().prepareSearch("idx") - 
-            .addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07"))
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2014-05-07")
+        ).get();
+        assertNoFailures(response);
         Histogram histogram = response.getAggregations().get("my_histogram");
         assertEquals(2, histogram.getBuckets().size());
         assertEquals("2014-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString());
@@ -155,10 +151,10 @@ public void testDateHistogram() {
         assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(1).getKeyAsString());
         assertEquals(1, histogram.getBuckets().get(1).getDocCount());
-        response = client().prepareSearch("idx")
-            .addAggregation(dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07"))
-            .get();
-        assertSearchResponse(response);
+        response = prepareSearch("idx").addAggregation(
+            dateHistogram("my_histogram").field("date").calendarInterval(DateHistogramInterval.YEAR).missing("2015-05-07")
+        ).get();
+        assertNoFailures(response);
         histogram = response.getAggregations().get("my_histogram");
         assertEquals(1, histogram.getBuckets().size());
         assertEquals("2015-01-01T00:00:00.000Z", histogram.getBuckets().get(0).getKeyAsString());
@@ -166,34 +162,30 @@ public void testDateHistogram() {
     }
     public void testCardinality() {
-        SearchResponse response = client().prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(cardinality("card").field("long").missing(2)).get();
+        assertNoFailures(response);
         Cardinality cardinality = response.getAggregations().get("card");
         assertEquals(2, cardinality.getValue());
     }
     public void testPercentiles() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(percentiles("percentiles").field("long").missing(1000))
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(percentiles("percentiles").field("long").missing(1000)).get();
+        assertNoFailures(response);
         Percentiles percentiles = response.getAggregations().get("percentiles");
         assertEquals(1000, percentiles.percentile(100), 0);
     }
     public void testStats() {
-        SearchResponse response = client().prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(stats("stats").field("long").missing(5)).get();
+        assertNoFailures(response);
         Stats stats = response.getAggregations().get("stats");
         assertEquals(2, stats.getCount());
         assertEquals(4, stats.getAvg(), 0);
     }
     public void testUnmappedGeoBounds() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1"))
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")).get();
+        assertNoFailures(response);
         GeoBounds bounds = response.getAggregations().get("bounds");
         assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5));
         assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5));
@@ -202,8 +194,8 @@ public void testUnmappedGeoBounds() {
     }
     public void testGeoBounds() {
-        SearchResponse response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get();
response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get(); + assertNoFailures(response); GeoBounds bounds = response.getAggregations().get("bounds"); assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); @@ -212,10 +204,8 @@ public void testGeoBounds() { } public void testGeoCentroid() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(geoCentroid("centroid").field("location").missing("2,1")) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation(geoCentroid("centroid").field("location").missing("2,1")).get(); + assertNoFailures(response); GeoCentroid centroid = response.getAggregations().get("centroid"); GeoPoint point = new GeoPoint(1.5, 1.5); assertThat(point.getY(), closeTo(centroid.centroid().getY(), 1E-5)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java index 795a72310cb2f..0af496d83f9db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/BooleanTermsIT.java @@ -15,7 +15,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms; import org.elasticsearch.test.ESIntegTestCase; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -73,13 +73,11 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSingleValueField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -107,13 +105,11 @@ public void testSingleValueField() throws Exception { } public void testMultiValueField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -141,15 +137,13 @@ public void testMultiValueField() throws Exception { } public void 
-        SearchResponse response = client().prepareSearch("idx_unmapped")
-            .addAggregation(
-                new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
-                    .size(between(1, 5))
-                    .collectMode(randomFrom(SubAggCollectionMode.values()))
-            )
-            .get();
-
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("idx_unmapped").addAggregation(
+            new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
+                .size(between(1, 5))
+                .collectMode(randomFrom(SubAggCollectionMode.values()))
+        ).get();
+
+        assertNoFailures(response);
         UnmappedTerms terms = response.getAggregations().get("terms");
         assertThat(terms, notNullValue());
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index 62a8a5c9dee98..920fd79401cc6 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -60,8 +60,8 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -225,11 +225,11 @@ private static String getBucketKeyAsString(ZonedDateTime key, ZoneId tz) {
     }
     public void testSingleValuedField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH))
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)
+        ).get();
-        assertSearchResponse(response);
+        assertNoFailures(response);
         Histogram histo = response.getAggregations().get("histo");
         assertThat(histo, notNullValue());
@@ -260,17 +260,11 @@ public void testSingleValuedField() throws Exception {
     }
     public void testSingleValuedFieldWithTimeZone() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.DAY)
-                    .minDocCount(1)
-                    .timeZone(ZoneId.of("+01:00"))
-            )
-            .execute()
-            .actionGet();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(ZoneId.of("+01:00"))
+        ).execute().actionGet();
         ZoneId tz = ZoneId.of("+01:00");
-        assertSearchResponse(response);
+        assertNoFailures(response);
         Histogram histo = response.getAggregations().get("histo");
         assertThat(histo, notNullValue());
@@ -328,12 +322,10 @@ public void testSingleValued_timeZone_epoch() throws Exception {
             format = format + "||date_optional_time";
         }
         ZoneId tz = ZoneId.of("+01:00");
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(tz).format(format) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.DAY).minDocCount(1).timeZone(tz).format(format) + ).get(); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -362,11 +354,11 @@ public void testSingleValued_timeZone_epoch() throws Exception { } public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(true))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(true)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -382,13 +374,11 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { } public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(false)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.key(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -403,13 +393,11 @@ public void testSingleValuedFieldOrderedByKeyDesc() throws Exception { } public void testSingleValuedFieldOrderedByCountAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(true)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(true)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -424,13 +412,11 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { } public void testSingleValuedFieldOrderedByCountDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -445,13 +431,11 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { } public void testSingleValuedFieldWithSubAggregation() throws Exception { - 
SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).subAggregation(sum("sum").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).subAggregation(sum("sum").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -504,16 +488,14 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { } public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("sum", true)) - .subAggregation(max("sum").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("sum", true)) + .subAggregation(max("sum").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -528,16 +510,14 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { } public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("sum", false)) - .subAggregation(max("sum").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("sum", false)) + .subAggregation(max("sum").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -552,16 +532,14 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception } public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("stats", "sum", false)) - .subAggregation(stats("stats").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("stats", "sum", false)) + .subAggregation(stats("stats").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -576,16 +554,14 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws } public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - 
.calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("max_constant", randomBoolean())) + .subAggregation(max("max_constant").field("constant")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -602,18 +578,16 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.MONTH) - .order(BucketOrder.aggregation("inner_histo>avg", asc)) - .subAggregation( - dateHistogram("inner_histo").calendarInterval(DateHistogramInterval.MONTH) - .field("dates") - .subAggregation(avg("avg").field("value")) - ) - ) - .get(); + prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.MONTH) + .order(BucketOrder.aggregation("inner_histo>avg", asc)) + .subAggregation( + dateHistogram("inner_histo").calendarInterval(DateHistogramInterval.MONTH) + .field("dates") + .subAggregation(avg("avg").field("value")) + ) + ).get(); fail("Expected an exception"); } catch (SearchPhaseExecutionException e) { ElasticsearchException[] rootCauses = e.guessRootCauses(); @@ -633,15 +607,13 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) - .calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -682,11 +654,11 @@ public void testSingleValuedFieldWithValueScript() throws Exception { */ public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -724,13 +696,11 @@ public void testMultiValuedField() throws Exception { } public void testMultiValuedFieldOrderedByCountDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - 
dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("dates").calendarInterval(DateHistogramInterval.MONTH).order(BucketOrder.count(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -773,15 +743,13 @@ public void testMultiValuedFieldOrderedByCountDesc() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("dates") - .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) - .calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("dates") + .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -829,14 +797,12 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testScriptSingleValue() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "date"); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -869,14 +835,12 @@ public void testScriptSingleValue() throws Exception { public void testScriptMultiValued() throws Exception { Map params = new HashMap<>(); params.put("fieldname", "dates"); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) - .calendarInterval(DateHistogramInterval.MONTH) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params)) + .calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -923,11 +887,11 @@ public void testScriptMultiValued() throws Exception { */ public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + 
dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -936,11 +900,11 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -971,8 +935,7 @@ public void testPartiallyUnmapped() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -1010,8 +973,7 @@ public void testSingleValueWithTimeZone() throws Exception { } indexRandom(true, reqs); - SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery()) .addAggregation( dateHistogram("date_histo").field("date") .timeZone(ZoneId.of("-02:00")) @@ -1106,16 +1068,14 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { SearchResponse response = null; try { - response = client().prepareSearch("idx2") - .addAggregation( - dateHistogram("histo").field("date") - .fixedInterval(DateHistogramInterval.days(interval)) - .minDocCount(0) - // when explicitly specifying a format, the extended bounds should be defined by the same format - .extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern))) - .format(pattern) - ) - .get(); + response = prepareSearch("idx2").addAggregation( + dateHistogram("histo").field("date") + .fixedInterval(DateHistogramInterval.days(interval)) + .minDocCount(0) + // when explicitly specifying a format, the extended bounds should be defined by the same format + .extendedBounds(new LongBounds(format(boundsMin, pattern), format(boundsMax, pattern))) + .format(pattern) + ).get(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); @@ -1130,7 +1090,7 @@ public void testSingleValueFieldWithExtendedBounds() throws Exception { throw e; } } - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1175,10 +1135,9 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { SearchResponse response = null; // retrieve those docs with the same time zone and extended bounds - response = client().prepareSearch(index) - .setQuery( - QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()) - ) + response = prepareSearch(index).setQuery( + QueryBuilders.rangeQuery("date").from("now/d").to("now/d").includeLower(true).includeUpper(true).timeZone(timezone.getId()) + ) .addAggregation( 
dateHistogram("histo").field("date") .calendarInterval(DateHistogramInterval.hours(1)) @@ -1187,7 +1146,7 @@ public void testSingleValueFieldWithExtendedBoundsTimezone() throws Exception { .extendedBounds(new LongBounds("now/d", "now/d+23h")) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat( "Expected 24 buckets for one day aggregation with hourly interval", @@ -1236,16 +1195,14 @@ public void testSingleValueFieldWithExtendedBoundsOffset() throws Exception { SearchResponse response = null; // retrieve those docs with the same time zone and extended bounds - response = client().prepareSearch(index) - .addAggregation( - dateHistogram("histo").field("date") - .calendarInterval(DateHistogramInterval.days(1)) - .offset("+6h") - .minDocCount(0) - .extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z")) - ) - .get(); - assertSearchResponse(response); + response = prepareSearch(index).addAggregation( + dateHistogram("histo").field("date") + .calendarInterval(DateHistogramInterval.days(1)) + .offset("+6h") + .minDocCount(0) + .extendedBounds(new LongBounds("2016-01-01T06:00:00Z", "2016-01-08T08:00:00Z")) + ).get(); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1293,8 +1250,7 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception } indexRandom(true, reqs); - SearchResponse response = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery()) .addAggregation(dateHistogram("date_histo").field("date").calendarInterval(DateHistogramInterval.DAY)) .get(); @@ -1313,16 +1269,11 @@ public void testSingleValueWithMultipleDateFormatsFromMapping() throws Exception } public void testIssue6965() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateHistogram("histo").field("date") - .timeZone(ZoneId.of("+01:00")) - .calendarInterval(DateHistogramInterval.MONTH) - .minDocCount(0) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + dateHistogram("histo").field("date").timeZone(ZoneId.of("+01:00")).calendarInterval(DateHistogramInterval.MONTH).minDocCount(0) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); ZoneId tz = ZoneId.of("+01:00"); @@ -1362,15 +1313,13 @@ public void testDSTBoundaryIssue9491() throws InterruptedException, ExecutionExc client().prepareIndex("test9491").setSource("d", "2014-11-08T13:00:00Z") ); ensureSearchable("test9491"); - SearchResponse response = client().prepareSearch("test9491") - .addAggregation( - dateHistogram("histo").field("d") - .calendarInterval(DateHistogramInterval.YEAR) - .timeZone(ZoneId.of("Asia/Jerusalem")) - .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX") - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test9491").addAggregation( + dateHistogram("histo").field("d") + .calendarInterval(DateHistogramInterval.YEAR) + .timeZone(ZoneId.of("Asia/Jerusalem")) + .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX") + ).get(); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2014-01-01T00:00:00.000+02:00")); @@ -1386,16 +1335,14 @@ public void testIssue8209() throws InterruptedException, ExecutionException { client().prepareIndex("test8209").setSource("d", "2014-04-30T00:00:00Z") 
         );
         ensureSearchable("test8209");
-        SearchResponse response = client().prepareSearch("test8209")
-            .addAggregation(
-                dateHistogram("histo").field("d")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
-                    .timeZone(ZoneId.of("CET"))
-                    .minDocCount(0)
-            )
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch("test8209").addAggregation(
+            dateHistogram("histo").field("d")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .format("yyyy-MM-dd'T'HH:mm:ss.SSSXXXXX")
+                .timeZone(ZoneId.of("CET"))
+                .minDocCount(0)
+        ).get();
+        assertNoFailures(response);
         Histogram histo = response.getAggregations().get("histo");
         assertThat(histo.getBuckets().size(), equalTo(4));
         assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2014-01-01T00:00:00.000+01:00"));
@@ -1423,16 +1370,14 @@ public void testFormatIndexUnmapped() throws InterruptedException, ExecutionExce
         indexRandom(true, client().prepareIndex(indexDateUnmapped).setSource("foo", "bar"));
         ensureSearchable(indexDateUnmapped);
 
-        SearchResponse response = client().prepareSearch(indexDateUnmapped)
-            .addAggregation(
-                dateHistogram("histo").field("dateField")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .format("yyyy-MM")
-                    .minDocCount(0)
-                    .extendedBounds(new LongBounds("2018-01", "2018-01"))
-            )
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch(indexDateUnmapped).addAggregation(
+            dateHistogram("histo").field("dateField")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .format("yyyy-MM")
+                .minDocCount(0)
+                .extendedBounds(new LongBounds("2018-01", "2018-01"))
+        ).get();
+        assertNoFailures(response);
         Histogram histo = response.getAggregations().get("histo");
         assertThat(histo.getBuckets().size(), equalTo(1));
         assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2018-01"));
@@ -1450,26 +1395,22 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
         assertAcked(indicesAdmin().prepareCreate(index).setMapping("d", "type=date,format=epoch_millis").get());
         indexRandom(true, client().prepareIndex(index).setSource("d", "1477954800000"));
         ensureSearchable(index);
-        SearchResponse response = client().prepareSearch(index)
-            .addAggregation(
-                dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))
-            )
-            .get();
-        assertSearchResponse(response);
+        SearchResponse response = prepareSearch(index).addAggregation(
+            dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("Europe/Berlin"))
+        ).get();
+        assertNoFailures(response);
         Histogram histo = response.getAggregations().get("histo");
         assertThat(histo.getBuckets().size(), equalTo(1));
         assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000"));
         assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L));
 
-        response = client().prepareSearch(index)
-            .addAggregation(
-                dateHistogram("histo").field("d")
-                    .calendarInterval(DateHistogramInterval.MONTH)
-                    .timeZone(ZoneId.of("Europe/Berlin"))
-                    .format("yyyy-MM-dd")
-            )
-            .get();
-        assertSearchResponse(response);
+        response = prepareSearch(index).addAggregation(
+            dateHistogram("histo").field("d")
+                .calendarInterval(DateHistogramInterval.MONTH)
+                .timeZone(ZoneId.of("Europe/Berlin"))
+                .format("yyyy-MM-dd")
+        ).get();
+        assertNoFailures(response);
         histo = response.getAggregations().get("histo");
         assertThat(histo.getBuckets().size(), equalTo(1));
         assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("2016-11-01"));
@@ -1486,8 +1427,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException,
      * "2015-10-25T04:00:00.000+01:00".
      */
     public void testDSTEndTransition() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .setQuery(new MatchNoneQueryBuilder())
+        SearchResponse response = prepareSearch("idx").setQuery(new MatchNoneQueryBuilder())
             .addAggregation(
                 dateHistogram("histo").field("date")
                     .timeZone(ZoneId.of("Europe/Oslo"))
@@ -1516,8 +1456,7 @@ public void testDSTEndTransition() throws Exception {
             equalTo(3600000L)
         );
 
-        response = client().prepareSearch("idx")
-            .setQuery(new MatchNoneQueryBuilder())
+        response = prepareSearch("idx").setQuery(new MatchNoneQueryBuilder())
             .addAggregation(
                 dateHistogram("histo").field("date")
                     .timeZone(ZoneId.of("Europe/Oslo"))
@@ -1555,7 +1494,6 @@ public void testScriptCaching() throws Exception {
         assertAcked(
             prepareCreate("cache_test_idx").setMapping("d", "type=date")
                 .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
-                .get()
         );
         String date = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(1, 1));
         String date2 = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format(date(2, 1));
@@ -1578,15 +1516,14 @@ public void testScriptCaching() throws Exception {
         // Test that a request using a nondeterministic script does not get cached
         Map params = new HashMap<>();
         params.put("fieldname", "d");
-        SearchResponse r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
             .addAggregation(
                 dateHistogram("histo").field("d")
                     .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params))
                     .calendarInterval(DateHistogramInterval.MONTH)
             )
             .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1598,15 +1535,14 @@ public void testScriptCaching() throws Exception {
         );
 
         // Test that a request using a deterministic script gets cached
-        r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        r = prepareSearch("cache_test_idx").setSize(0)
             .addAggregation(
                 dateHistogram("histo").field("d")
                     .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.LONG_PLUS_ONE_MONTH, params))
                     .calendarInterval(DateHistogramInterval.MONTH)
             )
             .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1618,11 +1554,10 @@ public void testScriptCaching() throws Exception {
         );
 
         // Ensure that non-scripted requests are cached as normal
-        r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        r = prepareSearch("cache_test_idx").setSize(0)
            .addAggregation(dateHistogram("histo").field("d").calendarInterval(DateHistogramInterval.MONTH))
            .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -1676,17 +1611,15 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound
 
     private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
         ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
-        SearchResponse response = client().prepareSearch("sort_idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.DAY)
-                    .order(BucketOrder.compound(order))
-                    .subAggregation(avg("avg_l").field("l"))
-                    .subAggregation(sum("sum_d").field("d"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("sort_idx").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.DAY)
+                .order(BucketOrder.compound(order))
+                .subAggregation(avg("avg_l").field("l"))
+                .subAggregation(sum("sum_d").field("d"))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Histogram histogram = response.getAggregations().get("histo");
         assertThat(histogram, notNullValue());
@@ -1722,15 +1655,12 @@ public void testDateNanosHistogram() throws Exception {
         indexRandom(true, client().prepareIndex("nanos").setId("2").setSource("date", "2000-01-02"));
 
         // Search interval 24 hours
-        SearchResponse r = client().prepareSearch("nanos")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24))
-                    .timeZone(ZoneId.of("Europe/Berlin"))
-            )
-            .addDocValueField("date")
-            .get();
-        assertSearchResponse(r);
+        SearchResponse r = prepareSearch("nanos").addAggregation(
+            dateHistogram("histo").field("date")
+                .fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24))
+                .timeZone(ZoneId.of("Europe/Berlin"))
+        ).addDocValueField("date").get();
+        assertNoFailures(r);
 
         Histogram histogram = r.getAggregations().get("histo");
         List buckets = histogram.getBuckets();
@@ -1740,13 +1670,10 @@ public void testDateNanosHistogram() throws Exception {
         assertEquals(946767600000L, ((ZonedDateTime) buckets.get(1).getKey()).toEpochSecond() * 1000);
         assertEquals(1, buckets.get(1).getDocCount());
 
-        r = client().prepareSearch("nanos")
-            .addAggregation(
-                dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC"))
-            )
-            .addDocValueField("date")
-            .get();
-        assertSearchResponse(r);
+        r = prepareSearch("nanos").addAggregation(
+            dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.seconds(60 * 60 * 24)).timeZone(ZoneId.of("UTC"))
+        ).addDocValueField("date").get();
+        assertNoFailures(r);
 
         histogram = r.getAggregations().get("histo");
         buckets = histogram.getBuckets();
@@ -1758,13 +1685,11 @@ public void testDateNanosHistogram() throws Exception {
     }
 
     public void testDateKeyFormatting() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("America/Edmonton"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date").calendarInterval(DateHistogramInterval.MONTH).timeZone(ZoneId.of("America/Edmonton"))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         InternalDateHistogram histogram = response.getAggregations().get("histo");
         List buckets = histogram.getBuckets();
@@ -1774,15 +1699,13 @@ public void testDateKeyFormatting() {
     }
 
     public void testHardBoundsOnDates() {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateHistogram("histo").field("date")
-                    .calendarInterval(DateHistogramInterval.DAY)
-                    .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateHistogram("histo").field("date")
+                .calendarInterval(DateHistogramInterval.DAY)
+                .hardBounds(new LongBounds("2012-02-01T00:00:00.000", "2012-03-03T00:00:00.000"))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         InternalDateHistogram histogram = response.getAggregations().get("histo");
         List buckets = histogram.getBuckets();
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index 40ff2d25e433c..64c1a7c8859fc 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -72,8 +72,7 @@ private void prepareIndex(ZonedDateTime date, int numHours, int stepSizeHours, i
     public void testSingleValueWithPositiveOffset() throws Exception {
         prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, 1, 0);
 
-        SearchResponse response = client().prepareSearch("idx2")
-            .setQuery(matchAllQuery())
+        SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
             .addAggregation(
                 dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
             )
@@ -92,8 +91,7 @@ public void testSingleValueWithPositiveOffset() throws Exception {
     public void testSingleValueWithNegativeOffset() throws Exception {
         prepareIndex(date("2014-03-11T00:00:00+00:00"), 5, -1, 0);
 
-        SearchResponse response = client().prepareSearch("idx2")
-            .setQuery(matchAllQuery())
+        SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
             .addAggregation(
                 dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY)
             )
@@ -116,8 +114,7 @@ public void testSingleValueWithOffsetMinDocCount() throws Exception {
         prepareIndex(date("2014-03-11T00:00:00+00:00"), 12, 1, 0);
         prepareIndex(date("2014-03-14T00:00:00+00:00"), 12, 1, 13);
 
-        SearchResponse response = client().prepareSearch("idx2")
-            .setQuery(matchAllQuery())
+        SearchResponse response = prepareSearch("idx2").setQuery(matchAllQuery())
             .addAggregation(
                 dateHistogram("date_histo").field("date")
                     .offset("6h")
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
index 27fb5c49df605..44b0ff05ea274 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java
@@ -42,7 +42,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
 import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
@@ -127,16 +127,14 @@ public void testDateMath() throws Exception {
         } else {
             rangeBuilder.script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.EXTRACT_FIELD, params));
         }
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                rangeBuilder.addUnboundedTo("a long time ago", "now-50y")
-                    .addRange("recently", "now-50y", "now-1y")
-                    .addUnboundedFrom("last year", "now-1y")
-                    .timeZone(ZoneId.of("Etc/GMT+5"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            rangeBuilder.addUnboundedTo("a long time ago", "now-50y")
+                .addRange("recently", "now-50y", "now-1y")
+                .addUnboundedFrom("last year", "now-1y")
+                .timeZone(ZoneId.of("Etc/GMT+5"))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -162,16 +160,11 @@ public void testDateMath() throws Exception {
     }
 
     public void testSingleValueField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo(date(2, 15))
-                    .addRange(date(2, 15), date(3, 15))
-                    .addUnboundedFrom(date(3, 15))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date").addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -208,16 +201,14 @@ public void testSingleValueField() throws Exception {
     }
 
     public void testSingleValueFieldWithStringDates() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo("2012-02-15")
-                    .addRange("2012-02-15", "2012-03-15")
-                    .addUnboundedFrom("2012-03-15")
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date")
+                .addUnboundedTo("2012-02-15")
+                .addRange("2012-02-15", "2012-03-15")
+                .addUnboundedFrom("2012-03-15")
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -254,17 +245,15 @@ public void testSingleValueFieldWithStringDates() throws Exception {
     }
 
     public void testSingleValueFieldWithStringDatesWithCustomFormat() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .format("yyyy-MM-dd")
-                    .addUnboundedTo("2012-02-15")
-                    .addRange("2012-02-15", "2012-03-15")
-                    .addUnboundedFrom("2012-03-15")
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date")
+                .format("yyyy-MM-dd")
+                .addUnboundedTo("2012-02-15")
+                .addRange("2012-02-15", "2012-03-15")
+                .addUnboundedFrom("2012-03-15")
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -308,17 +297,15 @@ public void testSingleValueFieldWithDateMath() throws Exception {
         String mar15Suffix = timeZoneOffset == 0 ? "Z" : date(3, 15, timezone).format(DateTimeFormatter.ofPattern("xxx", Locale.ROOT));
         long expectedFirstBucketCount = timeZoneOffset < 0 ? 3L : 2L;
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo("2012-02-15")
-                    .addRange("2012-02-15", "2012-02-15||+1M")
-                    .addUnboundedFrom("2012-02-15||+1M")
-                    .timeZone(timezone)
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date")
+                .addUnboundedTo("2012-02-15")
+                .addRange("2012-02-15", "2012-02-15||+1M")
+                .addUnboundedFrom("2012-02-15||+1M")
+                .timeZone(timezone)
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -355,16 +342,14 @@ public void testSingleValueFieldWithDateMath() throws Exception {
     }
 
     public void testSingleValueFieldWithCustomKey() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo("r1", date(2, 15))
-                    .addRange("r2", date(2, 15), date(3, 15))
-                    .addUnboundedFrom("r3", date(3, 15))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date")
+                .addUnboundedTo("r1", date(2, 15))
+                .addRange("r2", date(2, 15), date(3, 15))
+                .addUnboundedFrom("r3", date(3, 15))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -410,17 +395,15 @@ public void testSingleValueFieldWithCustomKey() throws Exception {
      */
 
     public void testSingleValuedFieldWithSubAggregation() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo("r1", date(2, 15))
-                    .addRange("r2", date(2, 15), date(3, 15))
-                    .addUnboundedFrom("r3", date(3, 15))
-                    .subAggregation(sum("sum").field("value"))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("date")
+                .addUnboundedTo("r1", date(2, 15))
+                .addRange("r2", date(2, 15), date(3, 15))
+                .addUnboundedFrom("r3", date(3, 15))
+                .subAggregation(sum("sum").field("value"))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -486,16 +469,11 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception {
      */
 
     public void testMultiValuedField() throws Exception {
-        SearchResponse response = client().prepareSearch("idx")
-            .addAggregation(
-                dateRange("range").field("dates")
-                    .addUnboundedTo(date(2, 15))
-                    .addRange(date(2, 15), date(3, 15))
-                    .addUnboundedFrom(date(3, 15))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx").addAggregation(
+            dateRange("range").field("dates").addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -532,16 +510,11 @@ public void testMultiValuedField() throws Exception {
     }
 
     public void testPartiallyUnmapped() throws Exception {
-        SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
-            .addAggregation(
-                dateRange("range").field("date")
-                    .addUnboundedTo(date(2, 15))
-                    .addRange(date(2, 15), date(3, 15))
-                    .addUnboundedFrom(date(3, 15))
-            )
-            .get();
+        SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation(
+            dateRange("range").field("date").addUnboundedTo(date(2, 15)).addRange(date(2, 15), date(3, 15)).addUnboundedFrom(date(3, 15))
+        ).get();
 
-        assertSearchResponse(response);
+        assertNoFailures(response);
 
         Range range = response.getAggregations().get("range");
         assertThat(range, notNullValue());
@@ -578,8 +551,7 @@ public void testPartiallyUnmapped() throws Exception {
     }
 
     public void testEmptyAggregation() throws Exception {
-        SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery())
             .addAggregation(
                 histogram("histo").field("value")
                     .interval(1L)
@@ -608,7 +580,7 @@ public void testEmptyAggregation() throws Exception {
 
     public void testNoRangesInQuery() {
         try {
-            client().prepareSearch("idx").addAggregation(dateRange("my_date_range_agg").field("value")).get();
+            prepareSearch("idx").addAggregation(dateRange("my_date_range_agg").field("value")).get();
             fail();
         } catch (SearchPhaseExecutionException spee) {
             Throwable rootCause = spee.getCause().getCause();
@@ -625,7 +597,6 @@ public void testScriptCaching() throws Exception {
         assertAcked(
             prepareCreate("cache_test_idx").setMapping("date", "type=date")
                 .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
-                .get()
         );
         indexRandom(
             true,
@@ -650,8 +621,7 @@ public void testScriptCaching() throws Exception {
         // Test that a request using a nondeterministic script does not get cached
         Map params = new HashMap<>();
         params.put("fieldname", "date");
-        SearchResponse r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
             .addAggregation(
                 dateRange("foo").field("date")
                     .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.CURRENT_DATE, params))
@@ -661,7 +631,7 @@ public void testScriptCaching() throws Exception {
                 )
             )
             .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -673,8 +643,7 @@ public void testScriptCaching() throws Exception {
         );
 
         // Test that a request using a deterministic script gets cached
-        r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        r = prepareSearch("cache_test_idx").setSize(0)
             .addAggregation(
                 dateRange("foo").field("date")
                     .script(new Script(ScriptType.INLINE, "mockscript", DateScriptMocksPlugin.DOUBLE_PLUS_ONE_MONTH, params))
@@ -684,7 +653,7 @@ public void testScriptCaching() throws Exception {
                 )
             )
             .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -696,8 +665,7 @@ public void testScriptCaching() throws Exception {
         );
 
         // Ensure that non-scripted requests are cached as normal
-        r = client().prepareSearch("cache_test_idx")
-            .setSize(0)
+        r = prepareSearch("cache_test_idx").setSize(0)
             .addAggregation(
                 dateRange("foo").field("date")
                     .addRange(
@@ -706,7 +674,7 @@ public void testScriptCaching() throws Exception {
                 )
             )
            .get();
-        assertSearchResponse(r);
+        assertNoFailures(r);
 
         assertThat(
             indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
@@ -734,8 +702,7 @@ public void testRangeWithFormatStringValue() throws Exception {
 
         // using no format should work when to/from is compatible with format in
         // mapping
-        SearchResponse searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        SearchResponse searchResponse = prepareSearch(indexName).setSize(0)
            .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40"))
            .get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
@@ -745,8 +712,7 @@ public void testRangeWithFormatStringValue() throws Exception {
 
         // using different format should work when to/from is compatible with
         // format in aggregation
-        searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(
                 dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss")
             )
@@ -758,8 +724,7 @@ public void testRangeWithFormatStringValue() throws Exception {
 
         // providing numeric input with format should work, but bucket keys are
        // different now
-        searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(
                 dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis")
             )
@@ -772,8 +737,7 @@ public void testRangeWithFormatStringValue() throws Exception {
         // providing numeric input without format should throw an exception
         ElasticsearchException e = expectThrows(
             ElasticsearchException.class,
-            () -> client().prepareSearch(indexName)
-                .setSize(0)
+            () -> prepareSearch(indexName).setSize(0)
                 .addAggregation(dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000))
                 .get()
         );
@@ -796,8 +760,7 @@ public void testRangeWithFormatNumericValue() throws Exception {
 
         // using no format should work when to/from is compatible with format in
         // mapping
-        SearchResponse searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        SearchResponse searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000))
             .get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
@@ -806,8 +769,7 @@ public void testRangeWithFormatNumericValue() throws Exception {
         assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
         assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
 
         // using no format should also work when and to/from are string values
-        searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000"))
             .get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
@@ -816,8 +778,7 @@ public void testRangeWithFormatNumericValue() throws Exception {
         assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L);
         assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L);
 
         // also e-notation should work, fractional parts should be truncated
-        searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3))
             .get();
         assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
@@ -827,8 +788,7 @@ public void testRangeWithFormatNumericValue() throws Exception {
 
         // using different format should work when to/from is compatible with
         // format in aggregation
-        searchResponse = client().prepareSearch(indexName)
-            .setSize(0)
+        searchResponse = prepareSearch(indexName).setSize(0)
             .addAggregation(
dateRange("date_range").field("date").addRange("00.16.40", "00.50.00").addRange("00.50.00", "01.06.40").format("HH.mm.ss") ) @@ -840,8 +800,7 @@ public void testRangeWithFormatNumericValue() throws Exception { // providing different numeric input with format should work, but bucket // keys are different now - searchResponse = client().prepareSearch(indexName) - .setSize(0) + searchResponse = prepareSearch(indexName).setSize(0) .addAggregation( dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java index e9180f5d8b9f2..612b4bf006aa2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -93,15 +93,14 @@ public void testIssue10719() throws Exception { // Tests that we can refer to nested elements under a sample in a path // statement boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .addAggregation( terms("genres").field("genre") .order(BucketOrder.aggregation("sample>max_price.value", asc)) .subAggregation(sampler("sample").shardSize(100).subAggregation(max("max_price").field("price"))) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms genres = response.getAggregations().get("genres"); Collection genreBuckets = genres.getBuckets(); // For this test to be useful we need >1 genre bucket to compare @@ -126,14 +125,13 @@ public void testSimpleDiversity() throws Exception { DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); Terms authors = sample.getAggregations().get("authors"); List testBuckets = authors.getBuckets(); @@ -153,8 +151,8 @@ public void testNestedDiversity() throws Exception { sampleAgg.subAggregation(terms("authors").field("author")); rootTerms.subAggregation(sampleAgg); - SearchResponse response 
= client().prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootTerms).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootTerms).get(); + assertNoFailures(response); Terms genres = response.getAggregations().get("genres"); List genreBuckets = genres.getBuckets(); for (Terms.Bucket genreBucket : genreBuckets) { @@ -182,11 +180,8 @@ public void testNestedSamples() throws Exception { sampleAgg.subAggregation(terms("genres").field("genre")); rootSample.subAggregation(sampleAgg); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) - .addAggregation(rootSample) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH).addAggregation(rootSample).get(); + assertNoFailures(response); Sampler genreSample = response.getAggregations().get("genreSample"); Sampler sample = genreSample.getAggregations().get("sample"); @@ -210,14 +205,13 @@ public void testPartiallyUnmappedDiversifyField() throws Exception { .field("author") .maxDocsPerValue(1); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped_author", "test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("idx_unmapped_author", "test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), greaterThan(0L)); Terms authors = sample.getAggregations().get("authors"); @@ -230,14 +224,13 @@ public void testWhollyUnmappedDiversifyField() throws Exception { DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped", "idx_unmapped_author") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("idx_unmapped", "idx_unmapped_author").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), equalTo(0L)); Terms authors = sample.getAggregations().get("authors"); @@ -249,26 +242,24 @@ public void testRidiculousSizeDiversity() throws Exception { DiversifiedAggregationBuilder sampleAgg = new DiversifiedAggregationBuilder("sample").shardSize(Integer.MAX_VALUE); sampleAgg.field("author").maxDocsPerValue(MAX_DOCS_PER_AUTHOR).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); sampleAgg = 
new DiversifiedAggregationBuilder("sample").shardSize(100); sampleAgg.field("author").maxDocsPerValue(Integer.MAX_VALUE).executionHint(randomExecutionHint()); sampleAgg.subAggregation(terms("authors").field("author")); - response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java index 1e4cfb444d47e..0381a5521dea0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java @@ -54,7 +54,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -255,14 +255,12 @@ private void getMultiSortDocs(List builders) throws IOExcep public void testSizeIsZero() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("high_card_idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .minDocCount(randomInt(1)) - .size(0) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get() + () -> prepareSearch("high_card_idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .minDocCount(randomInt(1)) + .size(0) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get() ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -277,12 +275,10 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms - SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(allResponse); + SearchResponse allResponse = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(allResponse); DoubleTerms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -292,14 +288,12 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception final int numPartitions = randomIntBetween(2, 4); Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(field) - .includeExclude(new IncludeExclude(partition, numPartitions)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -312,15 +306,13 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -337,15 +329,13 @@ public void testSingleValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, 
"_value + 1", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -366,15 +356,13 @@ public void testMultiValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(long) (_value / 1000 + 1)", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -406,22 +394,20 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { */ public void testScriptSingleValue() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .userValueTypeHint(ValueType.DOUBLE) - .script( - new Script( - ScriptType.INLINE, - CustomScriptPlugin.NAME, - "doc['" + MULTI_VALUED_FIELD_NAME + "'].value", - Collections.emptyMap() - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .userValueTypeHint(ValueType.DOUBLE) + .script( + new Script( + ScriptType.INLINE, + CustomScriptPlugin.NAME, + "doc['" + MULTI_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -438,22 +424,15 @@ public void testScriptSingleValue() throws Exception { } public void testScriptMultiValued() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .userValueTypeHint(ValueType.DOUBLE) - .script( - new Script( - ScriptType.INLINE, - CustomScriptPlugin.NAME, - "doc['" + MULTI_VALUED_FIELD_NAME + "']", - Collections.emptyMap() - ) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .userValueTypeHint(ValueType.DOUBLE) + .script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -474,13 +453,11 @@ public void testScriptMultiValued() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation( - new 
TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx_unmapped", "idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -497,15 +474,13 @@ public void testPartiallyUnmapped() throws Exception { } public void testPartiallyUnmappedWithFormat() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .format("0000.00") - ) - .get(); + SearchResponse response = prepareSearch("idx_unmapped", "idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .format("0000.00") + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -524,20 +499,18 @@ public void testPartiallyUnmappedWithFormat() throws Exception { public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTermsAgg() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation( - new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", asc)) + .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation( + new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -569,16 +542,14 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithSubTer public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("num_tags").field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("num_tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ).get(); - assertSearchResponse(response); + 
assertNoFailures(response); DoubleTerms tags = response.getAggregations().get("num_tags"); assertThat(tags, notNullValue()); @@ -606,20 +577,18 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>max", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -663,13 +632,11 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels( public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ).get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -682,17 +649,14 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("num_tags", true)) - .subAggregation( - new TermsAggregationBuilder("num_tags").field("num_tags") - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("num_tags", true)) + .subAggregation( + new TermsAggregationBuilder("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics type"); @@ -705,14 +669,12 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( public void 
testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME + "2") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME + "2") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " @@ -728,14 +690,12 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " @@ -750,16 +710,14 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -780,16 +738,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new 
TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -810,16 +766,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.variance", asc)) - .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.variance", asc)) + .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -854,8 +808,7 @@ public void testScriptScore() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(functionScoreQuery(scriptFunction(scoringScript))) + SearchResponse response = prepareSearch("idx").setQuery(functionScoreQuery(scriptFunction(scoringScript))) .addAggregation( new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) .userValueTypeHint(ValueType.DOUBLE) @@ -863,7 +816,7 @@ public void testScriptScore() { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -920,17 +873,15 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... 
order) { - SearchResponse response = client().prepareSearch("sort_idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d")) - ) - .get(); + SearchResponse response = prepareSearch("sort_idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); DoubleTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -964,7 +915,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -983,14 +933,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1002,14 +951,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1021,8 +969,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 4d799cbbea6b5..5971e287882f2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -79,9 +79,9 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))).get(); + SearchResponse response = prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1"))).get(); - assertSearchResponse(response); + assertNoFailures(response); Filter filter = response.getAggregations().get("tag1"); assertThat(filter, notNullValue()); @@ -93,9 +93,9 @@ public void testSimple() throws Exception { // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); - SearchResponse response = client().prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).get(); + SearchResponse response = prepareSearch("idx").addAggregation(filter("tag1", emptyFilter)).get(); - assertSearchResponse(response); + assertNoFailures(response); Filter filter = response.getAggregations().get("tag1"); assertThat(filter, notNullValue()); @@ -103,11 +103,11 @@ public void testEmptyFilterDeclarations() throws Exception { } public void testWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value"))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filter filter = response.getAggregations().get("tag1"); assertThat(filter, notNullValue()); @@ -128,11 +128,11 @@ public void testWithSubAggregation() throws Exception { } public void testAsSubAggregation() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery()))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field("value").interval(2L).subAggregation(filter("filter", matchAllQuery())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -147,7 +147,7 @@ public void testAsSubAggregation() { public void testWithContextBasedSubAggregation() throws Exception { try { - client().prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get(); + prepareSearch("idx").addAggregation(filter("tag1", termQuery("tag", "tag1")).subAggregation(avg("avg_value"))).get(); fail( "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" @@ -160,8 +160,7 @@ public void testWithContextBasedSubAggregation() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse 
searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery()))) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index 68acd22ce6c55..fa8974371a935 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -34,7 +34,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.filters; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -98,16 +98,14 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters( - "tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -128,13 +126,11 @@ public void testSimple() throws Exception { // https://github.com/elastic/elasticsearch/issues/8438 public void testEmptyFilterDeclarations() throws Exception { QueryBuilder emptyFilter = new BoolQueryBuilder(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1")))) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters("tags", randomOrder(new KeyedFilter("all", emptyFilter), new KeyedFilter("tag1", termQuery("tag", "tag1")))) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -147,16 +143,14 @@ public void testEmptyFilterDeclarations() throws Exception { } public void testWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters( - "tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ).subAggregation(avg("avg_value").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + 
).subAggregation(avg("avg_value").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -202,11 +196,11 @@ public void testWithSubAggregation() throws Exception { } public void testAsSubAggregation() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field("value").interval(2L).subAggregation(filters("filters", matchAllQuery()))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field("value").interval(2L).subAggregation(filters("filters", matchAllQuery())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -224,14 +218,12 @@ public void testAsSubAggregation() { public void testWithContextBasedSubAggregation() throws Exception { try { - client().prepareSearch("idx") - .addAggregation( - filters( - "tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ).subAggregation(avg("avg_value")) - ) - .get(); + prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).subAggregation(avg("avg_value")) + ).get(); fail( "expected execution to fail - an attempt to have a context based numeric sub-aggregation, but there is not value source" @@ -244,8 +236,7 @@ public void testWithContextBasedSubAggregation() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -269,11 +260,10 @@ public void testEmptyAggregation() throws Exception { } public void testSimpleNonKeyed() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2"))) + SearchResponse response = prepareSearch("idx").addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2"))) .get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -294,16 +284,14 @@ public void testSimpleNonKeyed() throws Exception { } public void testOtherBucket() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters( - "tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ).otherBucket(true) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -325,16 +313,14 @@ public void testOtherBucket() throws Exception { } public void testOtherNamedBucket() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters( - 
"tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ).otherBucket(true).otherBucketKey("foobar") - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true).otherBucketKey("foobar") + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -356,11 +342,11 @@ public void testOtherNamedBucket() throws Exception { } public void testOtherNonKeyed() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters("tags", termQuery("tag", "tag1"), termQuery("tag", "tag2")).otherBucket(true) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -385,16 +371,14 @@ public void testOtherNonKeyed() throws Exception { } public void testOtherWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filters( - "tags", - randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) - ).otherBucket(true).subAggregation(avg("avg_value").field("value")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + filters( + "tags", + randomOrder(new KeyedFilter("tag1", termQuery("tag", "tag1")), new KeyedFilter("tag2", termQuery("tag", "tag2"))) + ).otherBucket(true).subAggregation(avg("avg_value").field("value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filters filters = response.getAggregations().get("tags"); assertThat(filters, notNullValue()); @@ -456,8 +440,7 @@ public void testOtherWithSubAggregation() throws Exception { } public void testEmptyAggregationWithOtherBucket() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index df3f1b3e0fdb3..7639445f1f5ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -39,7 +39,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.geoDistance; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; 
import static org.hamcrest.Matchers.instanceOf; @@ -142,9 +142,9 @@ public void testSimple() throws Exception { for (Consumer range : ranges) { range.accept(builder); } - SearchResponse response = client().prepareSearch("idx").addAggregation(builder).get(); + SearchResponse response = prepareSearch("idx").addAggregation(builder).get(); - assertSearchResponse(response); + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); @@ -181,17 +181,15 @@ public void testSimple() throws Exception { } public void testSimpleWithCustomKeys() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo("ring1", 500) - .addRange("ring2", 500, 1000) - .addUnboundedFrom("ring3", 1000) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo("ring1", 500) + .addRange("ring2", 500, 1000) + .addUnboundedFrom("ring3", 1000) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); @@ -230,17 +228,15 @@ public void testSimpleWithCustomKeys() throws Exception { public void testUnmapped() throws Exception { clusterAdmin().prepareHealth("idx_unmapped").setWaitForYellowStatus().get(); - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation( - geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000) - ) - .get(); + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); @@ -277,17 +273,15 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); @@ -324,18 +318,16 @@ public void testPartiallyUnmapped() throws Exception { } public void testWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") - .unit(DistanceUnit.KILOMETERS) - .addUnboundedTo(500) - .addRange(500, 
1000) - .addUnboundedFrom(1000) - .subAggregation(terms("cities").field("city").collectMode(randomFrom(SubAggCollectionMode.values()))) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + .subAggregation(terms("cities").field("city").collectMode(randomFrom(SubAggCollectionMode.values()))) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); @@ -409,8 +401,7 @@ public void testWithSubAggregation() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -441,7 +432,7 @@ public void testEmptyAggregation() throws Exception { public void testNoRangesInQuery() { try { - client().prepareSearch("idx").addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")).get(); + prepareSearch("idx").addAggregation(geoDistance("geo_dist", new GeoPoint(52.3760, 4.894)).field("location")).get(); fail(); } catch (SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); @@ -451,18 +442,16 @@ public void testNoRangesInQuery() { } public void testMultiValues() throws Exception { - SearchResponse response = client().prepareSearch("idx-multi") - .addAggregation( - geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") - .unit(DistanceUnit.KILOMETERS) - .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC) - .addUnboundedTo(500) - .addRange(500, 1000) - .addUnboundedFrom(1000) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx-multi").addAggregation( + geoDistance("amsterdam_rings", new GeoPoint(52.3760, 4.894)).field("location") + .unit(DistanceUnit.KILOMETERS) + .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC) + .addUnboundedTo(500) + .addRange(500, 1000) + .addUnboundedFrom(1000) + ).get(); + + assertNoFailures(response); Range geoDist = response.getAggregations().get("amsterdam_rings"); assertThat(geoDist, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index 151de82117bc1..1cd8d5bc2fc3d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -36,7 +36,7 @@ import static org.elasticsearch.geometry.utils.Geohash.stringEncode; import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static 
org.hamcrest.Matchers.equalTo; @@ -132,11 +132,10 @@ public void setupSuiteScopeCluster() throws Exception { public void testSimple() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + SearchResponse response = prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) .get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); List buckets = geoGrid.getBuckets(); @@ -159,11 +158,11 @@ public void testSimple() throws Exception { public void testMultivalued() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("multi_valued_idx") - .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) - .get(); + SearchResponse response = prepareSearch("multi_valued_idx").addAggregation( + geohashGrid("geohashgrid").field("location").precision(precision) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); for (GeoGrid.Bucket cell : geoGrid.getBuckets()) { @@ -181,14 +180,12 @@ public void testFiltered() throws Exception { GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("location"); bbox.setCorners(smallestGeoHash).queryName("bbox"); for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.filter("filtered", bbox) - .subAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + AggregationBuilders.filter("filtered", bbox) + .subAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Filter filter = response.getAggregations().get("filtered"); @@ -207,11 +204,11 @@ public void testFiltered() throws Exception { public void testUnmapped() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) - .get(); + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + geohashGrid("geohashgrid").field("location").precision(precision) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); assertThat(geoGrid.getBuckets().size(), equalTo(0)); @@ -221,11 +218,11 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(geohashGrid("geohashgrid").field("location").precision(precision)) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + geohashGrid("geohashgrid").field("location").precision(precision) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); for (GeoGrid.Bucket cell : geoGrid.getBuckets()) { @@ -241,11 +238,11 @@ 
public void testPartiallyUnmapped() throws Exception { public void testTopMatch() throws Exception { for (int precision = 1; precision <= PRECISION; precision++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + geohashGrid("geohashgrid").field("location").size(1).shardSize(100).precision(precision) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid geoGrid = response.getAggregations().get("geohashgrid"); // Check we only have one bucket with the best match for that resolution @@ -270,9 +267,7 @@ public void testSizeIsZero() { final int shardSize = 10000; IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)) - .get() + () -> prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)).get() ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. Found [0] in [geohashgrid]")); } @@ -282,9 +277,7 @@ public void testShardSizeIsZero() { final int shardSize = 0; IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)) - .get() + () -> prepareSearch("idx").addAggregation(geohashGrid("geohashgrid").field("location").size(size).shardSize(shardSize)).get() ); assertThat(exception.getMessage(), containsString("[shardSize] must be greater than 0. 
Found [0] in [geohashgrid]")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java index 1ccb47a8517c4..347b2324027c0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GlobalIT.java @@ -21,7 +21,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -60,12 +60,11 @@ public void setupSuiteScopeCluster() throws Exception { } public void testWithStatsSubAggregator() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("tag", "tag1")) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.termQuery("tag", "tag1")) .addAggregation(global("global").subAggregation(stats("value_stats").field("value"))) .get(); - assertSearchResponse(response); + assertNoFailures(response); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -91,8 +90,7 @@ public void testWithStatsSubAggregator() throws Exception { public void testNonTopLevel() throws Exception { try { - client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("tag", "tag1")) + prepareSearch("idx").setQuery(QueryBuilders.termQuery("tag", "tag1")) .addAggregation(global("global").subAggregation(global("inner_global"))) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 3f47240fb0f9d..07b678e89c024 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -51,7 +51,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -234,11 +234,10 @@ private void getMultiSortDocs(List builders) throws IOExcep } public void testSingleValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) + SearchResponse response = prepareSearch("idx").addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = 
response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -257,9 +256,9 @@ public void testSingleValuedField() throws Exception { public void singleValuedField_withOffset() throws Exception { int interval1 = 10; int offset = 5; - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval1).offset(offset) + ).get(); // from setup we have between 6 and 20 documents, each with value 1 in test field int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? numValueBuckets : numValueBuckets + 1; @@ -287,10 +286,10 @@ public void singleValuedField_withOffset() throws Exception { */ public void testSingleValuedFieldWithRandomOffset() throws Exception { int offset = randomIntBetween(2, interval); - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset)) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).offset(offset) + ).get(); + assertNoFailures(response); // shifting by offset>2 creates new extra bucket [0,offset-1] // if offset is >= number of values in original last bucket, that effect is canceled int expectedNumberOfBuckets = (offset >= (numDocs % interval + 1)) ? numValueBuckets : numValueBuckets + 1; @@ -320,11 +319,11 @@ public void testSingleValuedFieldWithRandomOffset() throws Exception { } public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(true))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(true)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -341,11 +340,11 @@ public void testSingleValuedFieldOrderedByKeyAsc() throws Exception { } public void testsingleValuedFieldOrderedByKeyDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -362,11 +361,11 @@ public void testsingleValuedFieldOrderedByKeyDesc() throws Exception { } public void testSingleValuedFieldOrderedByCountAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(true))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(true)) + ).get(); - 
assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -389,11 +388,11 @@ public void testSingleValuedFieldOrderedByCountAsc() throws Exception { } public void testSingleValuedFieldOrderedByCountDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(false))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -416,15 +415,11 @@ public void testSingleValuedFieldOrderedByCountDesc() throws Exception { } public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -458,16 +453,14 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { } public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("sum", true)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("sum", true)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -500,16 +493,14 @@ public void testSingleValuedFieldOrderedBySubAggregationAsc() throws Exception { } public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("sum", false)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("sum", false)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -542,16 +533,14 @@ public void testSingleValuedFieldOrderedBySubAggregationDesc() throws Exception } public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws 
Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("stats.sum", false)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("stats.sum", false)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -586,16 +575,14 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationDesc() throws public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("filter>max", asc)) - .subAggregation(filter("filter", matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("filter>max", asc)) + .subAggregation(filter("filter", matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -624,16 +611,14 @@ public void testSingleValuedFieldOrderedBySubAggregationDescDeepOrderPath() thro } public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("max_constant", randomBoolean())) - .subAggregation(max("max_constant").field("constant")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("max_constant", randomBoolean())) + .subAggregation(max("max_constant").field("constant")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -652,18 +637,14 @@ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .order(BucketOrder.aggregation("inner_histo>avg", asc)) - .subAggregation( - histogram("inner_histo").interval(interval) - .field(MULTI_VALUED_FIELD_NAME) - .subAggregation(avg("avg").field("value")) - ) - ) - .get(); + prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .order(BucketOrder.aggregation("inner_histo>avg", asc)) + .subAggregation( + histogram("inner_histo").interval(interval).field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("value")) + ) + ).get(); fail("Expected an 
exception"); } catch (SearchPhaseExecutionException e) { ElasticsearchException[] rootCauses = e.guessRootCauses(); @@ -681,15 +662,13 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) - .interval(interval) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) + .interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); final int numBuckets = (numDocs + 1) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 1) / interval + 1]; @@ -713,11 +692,10 @@ public void testSingleValuedFieldWithValueScript() throws Exception { } public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval)) + SearchResponse response = prepareSearch("idx").addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -734,11 +712,11 @@ public void testMultiValuedField() throws Exception { } public void testMultiValuedFieldOrderedByKeyDesc() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -755,15 +733,13 @@ public void testMultiValuedFieldOrderedByKeyDesc() throws Exception { } public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(MULTI_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) - .interval(interval) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(MULTI_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) + .interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); final int numBuckets = (numDocs + 2) / interval - 2 / interval + 1; final long[] counts = new long[(numDocs + 2) / interval + 1]; @@ -792,14 +768,12 @@ public void testMultiValuedFieldWithValueScript() throws Exception { } public void testScriptSingleValue() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_value'].value", emptyMap())) - .interval(interval) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + 
histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_value'].value", emptyMap())) + .interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -816,14 +790,12 @@ public void testScriptSingleValue() throws Exception { } public void testScriptMultiValued() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_values']", emptyMap())) - .interval(interval) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['l_values']", emptyMap())) + .interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -840,11 +812,11 @@ public void testScriptMultiValued() throws Exception { } public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) - .get(); + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -853,11 +825,11 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -874,15 +846,13 @@ public void testPartiallyUnmapped() throws Exception { } public void testPartiallyUnmappedWithExtendedBounds() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(-1 * 2 * interval, valueCounts.length * interval) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(-1 * 2 * interval, valueCounts.length * interval) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -909,8 +879,7 @@ public void testPartiallyUnmappedWithExtendedBounds() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(1L) @@ -968,14 +937,9 @@ public void 
testSingleValuedFieldWithExtendedBounds() throws Exception { SearchResponse response = null; try { - response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .minDocCount(0) - .extendedBounds(boundsMin, boundsMax) - ) - .get(); + response = prepareSearch("idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).minDocCount(0).extendedBounds(boundsMin, boundsMax) + ).get(); if (invalidBoundsError) { fail("Expected an exception to be thrown when bounds.min is greater than bounds.max"); @@ -990,7 +954,7 @@ public void testSingleValuedFieldWithExtendedBounds() throws Exception { throw e; } } - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1044,8 +1008,7 @@ public void testEmptyWithExtendedBounds() throws Exception { SearchResponse response = null; try { - response = client().prepareSearch("idx") - .setQuery(QueryBuilders.termQuery("foo", "bar")) + response = prepareSearch("idx").setQuery(QueryBuilders.termQuery("foo", "bar")) .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(interval) @@ -1067,7 +1030,7 @@ public void testEmptyWithExtendedBounds() throws Exception { throw e; } } - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -1090,8 +1053,7 @@ public void testEmptyWithExtendedBounds() throws Exception { */ public void testExeptionOnNegativerInterval() { try { - client().prepareSearch("empty_bucket_idx") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)) + prepareSearch("empty_bucket_idx").addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(-1).minDocCount(0)) .get(); fail(); } catch (IllegalArgumentException e) { @@ -1107,10 +1069,8 @@ public void testDecimalIntervalAndOffset() throws Exception { client().prepareIndex("decimal_values").setId("2").setSource("d", 0.1) ); - SearchResponse r = client().prepareSearch("decimal_values") - .addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) - .get(); - assertSearchResponse(r); + SearchResponse r = prepareSearch("decimal_values").addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)).get(); + assertNoFailures(r); Histogram histogram = r.getAggregations().get("histo"); List buckets = histogram.getBuckets(); @@ -1129,7 +1089,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=float") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -1148,8 +1107,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( histogram("histo").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", emptyMap())) @@ -1157,7 +1115,7 @@ public void testScriptCaching() throws Exception { .offset(0.05) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( 
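// ---- illustrative sketch (not part of the patch): extended bounds ----------------
// extendedBounds widens the returned bucket range beyond the observed data, and only
// shows the padding buckets (doc count 0) when combined with minDocCount(0). The
// bounds below are illustrative; the tests randomize them, and bounds with
// min > max fail at execution time, as the surrounding try/catch expects.
long sketchMin = -2L * interval;
long sketchMax = (long) valueCounts.length * interval;
SearchResponse padded = prepareSearch("idx").addAggregation(
    histogram("histo").field(SINGLE_VALUED_FIELD_NAME)
        .interval(interval)
        .minDocCount(0)
        .extendedBounds(sketchMin, sketchMax)
).get();
assertNoFailures(padded);
// ---- end sketch -------------------------------------------------------------------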
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1169,8 +1127,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( histogram("histo").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", emptyMap())) @@ -1178,7 +1135,7 @@ public void testScriptCaching() throws Exception { .offset(0.05) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1190,11 +1147,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx") - .setSize(0) - .addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)) - .get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(histogram("histo").field("d").interval(0.7).offset(0.05)).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1249,21 +1203,17 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound public void testInvalidBounds() { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("empty_bucket_idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(0.0, 10.0)).extendedBounds(3, 20) - ) - .get() + () -> prepareSearch("empty_bucket_idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(0.0, 10.0)).extendedBounds(3, 20) + ).get() ); assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds")); e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("empty_bucket_idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(3.0, null)).extendedBounds(0, 20) - ) - .get() + () -> prepareSearch("empty_bucket_idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).hardBounds(new DoubleBounds(3.0, null)).extendedBounds(0, 20) + ).get() ); assertThat(e.toString(), containsString("Extended bounds have to be inside hard bounds, hard bounds")); } @@ -1277,10 +1227,10 @@ public void testHardBounds() throws Exception { client().prepareIndex("test").setId("3").setSource("d", 0.1) ); - SearchResponse r = client().prepareSearch("test") - .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, null))) - .get(); - assertSearchResponse(r); + SearchResponse r = prepareSearch("test").addAggregation( + histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, null)) + ).get(); + assertNoFailures(r); Histogram histogram = r.getAggregations().get("histo"); List buckets = histogram.getBuckets(); @@ -1288,20 +1238,16 @@ public void testHardBounds() throws Exception { assertEquals(0.1, (double) buckets.get(0).getKey(), 0.01d); assertEquals(0.5, (double) buckets.get(4).getKey(), 0.01d); - r = client().prepareSearch("test") - .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new 
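// ---- illustrative sketch (not part of the patch): script caching -----------------
// The caching tests pivot on script determinism: "Math.random()" marks the request
// uncacheable, while "_value + 1" is deterministic and may be served from the shard
// request cache. Cache behaviour is observed through the stats API, as the
// surrounding assertions do:
long cacheHits = indicesAdmin().prepareStats("cache_test_idx")
    .setRequestCache(true)
    .get()
    .getTotal()
    .getRequestCache()
    .getHitCount();
// Only size-0 searches are request-cache eligible by default, hence setSize(0) on
// every cached request in these hunks.
// ---- end sketch -------------------------------------------------------------------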
DoubleBounds(null, 0.0))) - .get(); - assertSearchResponse(r); + r = prepareSearch("test").addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(null, 0.0))).get(); + assertNoFailures(r); histogram = r.getAggregations().get("histo"); buckets = histogram.getBuckets(); assertEquals(1, buckets.size()); assertEquals(-0.6, (double) buckets.get(0).getKey(), 0.01d); - r = client().prepareSearch("test") - .addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, 0.3))) - .get(); - assertSearchResponse(r); + r = prepareSearch("test").addAggregation(histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, 0.3))).get(); + assertNoFailures(r); histogram = r.getAggregations().get("histo"); buckets = histogram.getBuckets(); @@ -1311,17 +1257,15 @@ public void testHardBounds() throws Exception { } private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) { - SearchResponse response = client().prepareSearch("sort_idx") - .addAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(1) - .order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d")) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("sort_idx").addAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(1) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ).get(); + + assertNoFailures(response); Histogram histogram = response.getAggregations().get("histo"); assertThat(histogram, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java index 22cf163c44005..8e4c503b89bb5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpRangeIT.java @@ -25,7 +25,7 @@ import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -64,16 +64,14 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSingleValuedField() { - SearchResponse rsp = client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ip") - .addUnboundedTo("192.168.1.0") - .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10") - ) - .get(); - assertSearchResponse(rsp); + SearchResponse rsp = prepareSearch("idx").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ip") + .addUnboundedTo("192.168.1.0") + .addRange("192.168.1.0", "192.168.1.10") + .addUnboundedFrom("192.168.1.10") + ).get(); + assertNoFailures(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -97,16 +95,14 @@ public void testSingleValuedField() { } public void testMultiValuedField() { - SearchResponse rsp = client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.ipRange("my_range") - 
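// ---- illustrative sketch (not part of the patch): hard bounds --------------------
// hardBounds clips the histogram to a window instead of padding it: buckets outside
// the DoubleBounds are discarded even when documents fall there, and a null edge
// leaves that side open. Extended bounds must sit inside hard bounds, which is what
// testInvalidBounds above asserts.
SearchResponse clipped = prepareSearch("test").addAggregation(
    histogram("histo").field("d").interval(0.1).hardBounds(new DoubleBounds(0.0, 0.3))
).get();
assertNoFailures(clipped);
Histogram clippedHisto = clipped.getAggregations().get("histo");
// Only keys inside [0.0, 0.3] survive, matching the bucket assertions above.
// ---- end sketch -------------------------------------------------------------------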
.field("ips") - .addUnboundedTo("192.168.1.0") - .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10") - ) - .get(); - assertSearchResponse(rsp); + SearchResponse rsp = prepareSearch("idx").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ips") + .addUnboundedTo("192.168.1.0") + .addRange("192.168.1.0", "192.168.1.10") + .addUnboundedFrom("192.168.1.10") + ).get(); + assertNoFailures(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -130,16 +126,14 @@ public void testMultiValuedField() { } public void testIpMask() { - SearchResponse rsp = client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ips") - .addMaskRange("::/0") - .addMaskRange("0.0.0.0/0") - .addMaskRange("2001:db8::/64") - ) - .get(); - assertSearchResponse(rsp); + SearchResponse rsp = prepareSearch("idx").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ips") + .addMaskRange("::/0") + .addMaskRange("0.0.0.0/0") + .addMaskRange("2001:db8::/64") + ).get(); + assertNoFailures(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -157,16 +151,14 @@ public void testIpMask() { } public void testPartiallyUnmapped() { - SearchResponse rsp = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ip") - .addUnboundedTo("192.168.1.0") - .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10") - ) - .get(); - assertSearchResponse(rsp); + SearchResponse rsp = prepareSearch("idx", "idx_unmapped").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ip") + .addUnboundedTo("192.168.1.0") + .addRange("192.168.1.0", "192.168.1.10") + .addUnboundedFrom("192.168.1.10") + ).get(); + assertNoFailures(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -190,16 +182,14 @@ public void testPartiallyUnmapped() { } public void testUnmapped() { - SearchResponse rsp = client().prepareSearch("idx_unmapped") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ip") - .addUnboundedTo("192.168.1.0") - .addRange("192.168.1.0", "192.168.1.10") - .addUnboundedFrom("192.168.1.10") - ) - .get(); - assertSearchResponse(rsp); + SearchResponse rsp = prepareSearch("idx_unmapped").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ip") + .addUnboundedTo("192.168.1.0") + .addRange("192.168.1.0", "192.168.1.10") + .addUnboundedFrom("192.168.1.10") + ).get(); + assertNoFailures(rsp); Range range = rsp.getAggregations().get("my_range"); assertEquals(3, range.getBuckets().size()); @@ -225,12 +215,9 @@ public void testUnmapped() { public void testRejectsScript() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) - ) - .get() + () -> prepareSearch("idx").addAggregation( + AggregationBuilders.ipRange("my_range").script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) + ).get() ); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } @@ -238,20 +225,18 @@ public void testRejectsScript() { public void testRejectsValueScript() { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> 
client().prepareSearch("idx") - .addAggregation( - AggregationBuilders.ipRange("my_range") - .field("ip") - .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) - ) - .get() + () -> prepareSearch("idx").addAggregation( + AggregationBuilders.ipRange("my_range") + .field("ip") + .script(new Script(ScriptType.INLINE, "mockscript", "dummy", Collections.emptyMap())) + ).get() ); assertThat(e.getMessage(), containsString("[ip_range] does not support scripts")); } public void testNoRangesInQuery() { try { - client().prepareSearch("idx").addAggregation(AggregationBuilders.ipRange("my_range").field("ip")).get(); + prepareSearch("idx").addAggregation(AggregationBuilders.ipRange("my_range").field("ip")).get(); fail(); } catch (SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java index 568f18b15ecec..d50ea294287eb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/IpTermsIT.java @@ -22,7 +22,7 @@ import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; public class IpTermsIT extends AbstractTermsTestCase { @@ -61,10 +61,10 @@ public void testScriptValue() throws Exception { ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip'].value", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("index") - .addAggregation(new TermsAggregationBuilder("my_terms").script(script).executionHint(randomExecutionHint())) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("index").addAggregation( + new TermsAggregationBuilder("my_terms").script(script).executionHint(randomExecutionHint()) + ).get(); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -89,10 +89,10 @@ public void testScriptValues() throws Exception { ); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['ip']", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("index") - .addAggregation(new TermsAggregationBuilder("my_terms").script(script).executionHint(randomExecutionHint())) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("index").addAggregation( + new TermsAggregationBuilder("my_terms").script(script).executionHint(randomExecutionHint()) + ).get(); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); @@ -116,11 +116,11 @@ public void testMissingValue() throws Exception { client().prepareIndex("index").setId("3").setSource("ip", "127.0.0.1"), client().prepareIndex("index").setId("4").setSource("not_ip", "something") ); - SearchResponse response = client().prepareSearch("index") - .addAggregation(new TermsAggregationBuilder("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint())) - .get(); + SearchResponse response = 
prepareSearch("index").addAggregation( + new TermsAggregationBuilder("my_terms").field("ip").missing("127.0.0.1").executionHint(randomExecutionHint()) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("my_terms"); assertEquals(2, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java index fa5a23ecb322f..6c3d1c44aafed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java @@ -53,7 +53,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -243,14 +243,12 @@ private void getMultiSortDocs(List builders) throws IOExcep public void testSizeIsZero() { IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("high_card_idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .minDocCount(randomInt(1)) - .size(0) - ) - .get() + () -> prepareSearch("high_card_idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .minDocCount(randomInt(1)) + .size(0) + ).get() ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -265,10 +263,10 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms - SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation(new TermsAggregationBuilder("terms").field(field).collectMode(randomFrom(SubAggCollectionMode.values()))) - .get(); - assertSearchResponse(allResponse); + SearchResponse allResponse = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(allResponse); LongTerms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -278,14 +276,12 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception final int numPartitions = randomIntBetween(2, 4); Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(field) - .includeExclude(new IncludeExclude(partition, numPartitions)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -298,15 +294,13 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); // Scripts force the results to doubles DoubleTerms terms = response.getAggregations().get("terms"); @@ -324,15 +318,13 @@ public void testSingleValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value - 1", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value - 1", 
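// ---- illustrative sketch (not part of the patch): partitioned terms --------------
// Partition filtering splits the term space into numPartitions disjoint slices so a
// high-cardinality field can be paged through; unioning the per-partition buckets
// must reproduce the unpartitioned result, which is what the loop above checks.
int sketchPartitions = 3; // illustrative; the test randomizes 2..4
for (int partition = 0; partition < sketchPartitions; partition++) {
    SearchResponse slice = prepareSearch("idx").addAggregation(
        new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
            .includeExclude(new IncludeExclude(partition, sketchPartitions))
            .collectMode(randomFrom(SubAggCollectionMode.values()))
    ).get();
    assertNoFailures(slice);
}
// ---- end sketch -------------------------------------------------------------------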
Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); // Scripts force the results to doubles DoubleTerms terms = response.getAggregations().get("terms"); @@ -354,15 +346,13 @@ public void testMultiValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / 1000 + 1)", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "floor(_value / 1000 + 1)", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); // The script always converts long to double DoubleTerms terms = response.getAggregations().get("terms"); @@ -402,15 +392,13 @@ public void testScriptSingleValue() throws Exception { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .userValueTypeHint(ValueType.LONG) - .script(script) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .userValueTypeHint(ValueType.LONG) + .script(script) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -435,15 +423,13 @@ public void testScriptMultiValued() throws Exception { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .userValueTypeHint(ValueType.LONG) - .script(script) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .userValueTypeHint(ValueType.LONG) + .script(script) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -464,13 +450,11 @@ public void testScriptMultiValued() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx_unmapped", "idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -487,15 +471,13 @@ public void testPartiallyUnmapped() throws Exception { } public void testPartiallyUnmappedWithFormat() throws Exception { - 
SearchResponse response = client().prepareSearch("idx_unmapped", "idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .format("0000") - ) - .get(); + SearchResponse response = prepareSearch("idx_unmapped", "idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .format("0000") + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -514,20 +496,18 @@ public void testPartiallyUnmappedWithFormat() throws Exception { public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithTermsSubAgg() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", asc)) - .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) - .subAggregation( - new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", asc)) + .subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)) + .subAggregation( + new TermsAggregationBuilder("subTerms").field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -559,16 +539,14 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscWithTermsS public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("num_tags").field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("num_tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms tags = response.getAggregations().get("num_tags"); assertThat(tags, notNullValue()); @@ -596,20 +574,18 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").field("num_tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>max", asc)) - .subAggregation( - filter("filter1", 
QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").field("num_tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME)) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -653,13 +629,11 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels( public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ).get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -672,17 +646,14 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("num_tags", true)) - .subAggregation( - new TermsAggregationBuilder("num_tags").field("num_tags") - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("num_tags", true)) + .subAggregation( + new TermsAggregationBuilder("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation which is not of a metrics type"); @@ -695,14 +666,12 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + 
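// ---- illustrative sketch (not part of the patch): deep order paths ---------------
// BucketOrder.aggregation accepts a '>'-separated path that may traverse
// single-bucket aggregations (filters here) down to a metric; paths ending at a
// missing, multi-bucket, or non-metric aggregation fail the search, which is what
// the fail(...) branches in this region assert.
SearchResponse deep = prepareSearch("idx").addAggregation(
    new TermsAggregationBuilder("tags").field("num_tag")
        .collectMode(randomFrom(SubAggCollectionMode.values()))
        .order(BucketOrder.aggregation("filter1>filter2>max", true))
        .subAggregation(
            filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
                filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))
            )
        )
).get();
assertNoFailures(deep);
// ---- end sketch -------------------------------------------------------------------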
.order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " @@ -718,14 +687,12 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", true)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " @@ -740,16 +707,14 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -771,16 +736,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -802,16 +765,14 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - 
.addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.variance", asc)) - .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.variance", asc)) + .subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -872,17 +833,15 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) { - SearchResponse response = client().prepareSearch("sort_idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d")) - ) - .get(); + SearchResponse response = prepareSearch("sort_idx").addAggregation( + new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -916,7 +875,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -935,14 +893,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -954,14 +911,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -973,8 +929,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = 
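// ---- illustrative sketch (not part of the patch): compound ordering --------------
// BucketOrder.compound applies its criteria in sequence, each later criterion
// breaking ties left by the previous one; the assertMultiSortResponse helpers feed
// it a varargs chain such as:
SearchResponse multiSort = prepareSearch("sort_idx").addAggregation(
    new TermsAggregationBuilder("terms").field(SINGLE_VALUED_FIELD_NAME)
        .collectMode(randomFrom(SubAggCollectionMode.values()))
        .order(BucketOrder.compound(BucketOrder.aggregation("avg_l", true), BucketOrder.key(false)))
        .subAggregation(avg("avg_l").field("l"))
        .subAggregation(sum("sum_d").field("d"))
).get();
assertNoFailures(multiSort);
// ---- end sketch -------------------------------------------------------------------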
client().prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index 3a49c5ba7132e..36ba2a988668a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -306,8 +306,7 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception { // all terms - final SearchResponse allTermsResponse = client().prepareSearch("idx") - .setSize(0) + final SearchResponse allTermsResponse = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation( script.apply(terms("terms"), field) @@ -325,8 +324,7 @@ private void testMinDocCountOnTerms(String field, Script script, BucketOrder ord for (long minDocCount = 0; minDocCount < 20; ++minDocCount) { final int size = randomIntBetween(1, cardinality + 2); - final SearchRequest request = client().prepareSearch("idx") - .setSize(0) + final SearchRequest request = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation( script.apply(terms("terms"), field) @@ -379,8 +377,7 @@ public void testDateHistogramKeyDesc() throws Exception { private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { final int interval = randomIntBetween(1, 3); - final SearchResponse allResponse = client().prepareSearch("idx") - .setSize(0) + final SearchResponse allResponse = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0)) .get(); @@ -388,8 +385,7 @@ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { final Histogram allHisto = allResponse.getAggregations().get("histo"); for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { - final SearchResponse response = client().prepareSearch("idx") - .setSize(0) + final SearchResponse response = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount)) .get(); @@ -398,8 +394,7 @@ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception { } private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception { - final SearchResponse allResponse = client().prepareSearch("idx") - .setSize(0) + final SearchResponse allResponse = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).order(order).minDocCount(0)) .get(); @@ -407,8 +402,7 @@ private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception final Histogram allHisto = allResponse.getAggregations().get("histo"); for (long minDocCount = 0; minDocCount < 50; ++minDocCount) { - final SearchResponse response = client().prepareSearch("idx") - 
.setSize(0) + final SearchResponse response = prepareSearch("idx").setSize(0) .setQuery(QUERY) .addAggregation( dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).order(order).minDocCount(minDocCount) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java index 5deeeb40953b9..eb2ad6de7789e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.core.IsNull.notNullValue; @@ -145,16 +145,14 @@ private void assertCorrectlySorted(Histogram histo, boolean asc, SubAggregation public void testTerms(String fieldName) { final boolean asc = randomBoolean(); SubAggregation agg = randomFrom(SubAggregation.values()); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").field(fieldName) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(agg.builder()) - .order(BucketOrder.aggregation(agg.sortKey(), asc)) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").field(fieldName) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(agg.builder()) + .order(BucketOrder.aggregation(agg.sortKey(), asc)) + ).get(); + + assertNoFailures(response); final Terms terms = response.getAggregations().get("terms"); assertCorrectlySorted(terms, asc, agg); } @@ -174,16 +172,14 @@ public void testDoubleTerms() { public void testLongHistogram() { final boolean asc = randomBoolean(); SubAggregation agg = randomFrom(SubAggregation.values()); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field("long_value") - .interval(randomIntBetween(1, 2)) - .subAggregation(agg.builder()) - .order(BucketOrder.aggregation(agg.sortKey(), asc)) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field("long_value") + .interval(randomIntBetween(1, 2)) + .subAggregation(agg.builder()) + .order(BucketOrder.aggregation(agg.sortKey(), asc)) + ).get(); + + assertNoFailures(response); final Histogram histo = response.getAggregations().get("histo"); assertCorrectlySorted(histo, asc, agg); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 823b4f8404176..2ab107c2580c7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -48,7 +48,6 @@ import static 
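// ---- illustrative sketch (not part of the patch): minDocCount --------------------
// MinDocCountIT's pattern in the hunks above: fetch a baseline with minDocCount(0),
// then sweep increasing thresholds and check each response equals the baseline with
// the under-threshold buckets pruned. QUERY and the "d" field come from the
// surrounding test class; the interval here is illustrative.
SearchResponse baseline = prepareSearch("idx").setSize(0)
    .setQuery(QUERY)
    .addAggregation(histogram("histo").field("d").interval(1).order(BucketOrder.key(true)).minDocCount(0))
    .get();
assertNoFailures(baseline);
// every bucket of a minDocCount(n) response must then satisfy
// bucket.getDocCount() >= n
// ---- end sketch -------------------------------------------------------------------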
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -178,11 +177,11 @@ public void setupSuiteScopeCluster() throws Exception { } public void testSimple() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(nested("nested", "nested").subAggregation(stats("nested_value_stats").field("nested.value"))) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + nested("nested", "nested").subAggregation(stats("nested_value_stats").field("nested.value")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); double min = Double.POSITIVE_INFINITY; double max = Double.NEGATIVE_INFINITY; @@ -214,9 +213,9 @@ public void testSimple() throws Exception { } public void testNonExistingNestedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .addAggregation(nested("nested", "value").subAggregation(stats("nested_value_stats").field("nested.value"))) - .get(); + SearchResponse searchResponse = prepareSearch("idx").addAggregation( + nested("nested", "value").subAggregation(stats("nested_value_stats").field("nested.value")) + ).get(); Nested nested = searchResponse.getAggregations().get("nested"); assertThat(nested, Matchers.notNullValue()); @@ -225,13 +224,11 @@ public void testNonExistingNestedField() throws Exception { } public void testNestedWithSubTermsAgg() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - nested("nested", "nested").subAggregation(terms("values").field("nested.value").size(100).collectMode(aggCollectionMode)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + nested("nested", "nested").subAggregation(terms("values").field("nested.value").size(100).collectMode(aggCollectionMode)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); long docCount = 0; long[] counts = new long[numParents + 6]; @@ -275,16 +272,14 @@ public void testNestedWithSubTermsAgg() throws Exception { } public void testNestedAsSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("top_values").field("value") - .size(100) - .collectMode(aggCollectionMode) - .subAggregation(nested("nested", "nested").subAggregation(max("max_value").field("nested.value"))) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("top_values").field("value") + .size(100) + .collectMode(aggCollectionMode) + .subAggregation(nested("nested", "nested").subAggregation(max("max_value").field("nested.value"))) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms values = response.getAggregations().get("top_values"); assertThat(values, notNullValue()); @@ -304,16 +299,14 @@ public void testNestedAsSubAggregation() throws Exception { } public void testNestNestedAggs() throws Exception { - SearchResponse response = client().prepareSearch("idx_nested_nested_aggs") - .addAggregation( - nested("level1", "nested1").subAggregation( - 
terms("a").field("nested1.a.keyword") - .collectMode(aggCollectionMode) - .subAggregation(nested("level2", "nested1.nested2").subAggregation(sum("sum").field("nested1.nested2.b"))) - ) + SearchResponse response = prepareSearch("idx_nested_nested_aggs").addAggregation( + nested("level1", "nested1").subAggregation( + terms("a").field("nested1.a.keyword") + .collectMode(aggCollectionMode) + .subAggregation(nested("level2", "nested1.nested2").subAggregation(sum("sum").field("nested1.nested2.b"))) ) - .get(); - assertSearchResponse(response); + ).get(); + assertNoFailures(response); Nested level1 = response.getAggregations().get("level1"); assertThat(level1, notNullValue()); @@ -340,8 +333,7 @@ public void testNestNestedAggs() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))) .get(); @@ -476,26 +468,22 @@ public void testParentFilterResolvedCorrectly() throws Exception { }""", XContentType.JSON)); indexRandom(true, indexRequests); - SearchResponse response = client().prepareSearch("idx2") - .addAggregation( - terms("startDate").field("dates.month.start") - .subAggregation( - terms("endDate").field("dates.month.end") - .subAggregation( - terms("period").field("dates.month.label") - .subAggregation( - nested("ctxt_idfier_nested", "comments").subAggregation( - filter("comment_filter", termQuery("comments.identifier", "29111")).subAggregation( - nested("nested_tags", "comments.tags").subAggregation( - terms("tag").field("comments.tags.name") - ) - ) + SearchResponse response = prepareSearch("idx2").addAggregation( + terms("startDate").field("dates.month.start") + .subAggregation( + terms("endDate").field("dates.month.end") + .subAggregation( + terms("period").field("dates.month.label") + .subAggregation( + nested("ctxt_idfier_nested", "comments").subAggregation( + filter("comment_filter", termQuery("comments.identifier", "29111")).subAggregation( + nested("nested_tags", "comments.tags").subAggregation(terms("tag").field("comments.tags.name")) ) ) - ) - ) - ) - .get(); + ) + ) + ) + ).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -585,12 +573,10 @@ public void testNestedSameDocIdProcessedMultipleTime() throws Exception { .get(); refresh(); - SearchResponse response = client().prepareSearch("idx4") - .addAggregation( - terms("category").field("categories") - .subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id"))) - ) - .get(); + SearchResponse response = prepareSearch("idx4").addAggregation( + terms("category").field("categories") + .subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id"))) + ).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -761,33 +747,29 @@ public void testFilterAggInsideNestedAgg() throws Exception { .get(); refresh(); - SearchResponse response = client().prepareSearch("classes") - .addAggregation( - nested("to_method", "methods").subAggregation( - filter( - "num_string_params", - nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) - ) + SearchResponse response = prepareSearch("classes").addAggregation( + nested("to_method", "methods").subAggregation( + 
filter( + "num_string_params", + nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) ) ) - .get(); + ).get(); Nested toMethods = response.getAggregations().get("to_method"); Filter numStringParams = toMethods.getAggregations().get("num_string_params"); assertThat(numStringParams.getDocCount(), equalTo(3L)); - response = client().prepareSearch("classes") - .addAggregation( - nested("to_method", "methods").subAggregation( - terms("return_type").field("methods.return_type") - .subAggregation( - filter( - "num_string_params", - nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) - ) + response = prepareSearch("classes").addAggregation( + nested("to_method", "methods").subAggregation( + terms("return_type").field("methods.return_type") + .subAggregation( + filter( + "num_string_params", + nestedQuery("methods.parameters", termQuery("methods.parameters.type", "String"), ScoreMode.None) ) - ) + ) ) - .get(); + ).get(); toMethods = response.getAggregations().get("to_method"); Terms terms = toMethods.getAggregations().get("return_type"); Bucket bucket = terms.getBucketByKey("void"); @@ -813,14 +795,11 @@ public void testExtractInnerHitBuildersWithDuplicateHitName() throws Exception { ); ensureGreen("idxduplicatehitnames"); - SearchRequestBuilder searchRequestBuilder = client().prepareSearch("idxduplicatehitnames") - .setQuery( - boolQuery().should( - nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1")) - ) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih2"))) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1"))) - ); + SearchRequestBuilder searchRequestBuilder = prepareSearch("idxduplicatehitnames").setQuery( + boolQuery().should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1"))) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih2"))) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder("ih1"))) + ); assertFailures( searchRequestBuilder, @@ -836,12 +815,11 @@ public void testExtractInnerHitBuildersWithDuplicatePath() throws Exception { ); ensureGreen("idxnullhitnames"); - SearchRequestBuilder searchRequestBuilder = client().prepareSearch("idxnullhitnames") - .setQuery( - boolQuery().should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) - .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) - ); + SearchRequestBuilder searchRequestBuilder = prepareSearch("idxnullhitnames").setQuery( + boolQuery().should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + .should(nestedQuery("property", termQuery("property.id", 1D), ScoreMode.None).innerHit(new InnerHitBuilder())) + ); assertFailures( searchRequestBuilder, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index a22ed8e66ba9d..66978eba00e26 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -92,13 +92,11 @@ public void testRandomSampler() { double sampledDocCount = 0.0; for (int i = 0; i < NUM_SAMPLE_RUNS; i++) { - SearchRequest sampledRequest = client().prepareSearch("idx") - .addAggregation( - new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - ) - .request(); + SearchRequest sampledRequest = prepareSearch("idx").addAggregation( + new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + ).request(); InternalRandomSampler sampler = client().search(sampledRequest).actionGet().getAggregations().get("sampler"); sampleMonotonicValue += ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); sampleNumericValue += ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); @@ -114,8 +112,7 @@ public void testRandomSampler() { double maxCountError = 6.0 * Math.sqrt(PROBABILITY * numDocs / NUM_SAMPLE_RUNS); assertThat(Math.abs(sampledDocCount - expectedDocCount), lessThan(maxCountError)); - SearchResponse trueValueResponse = client().prepareSearch("idx") - .addAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + SearchResponse trueValueResponse = prepareSearch("idx").addAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) .addAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) .get(); double trueMonotonic = ((Avg) trueValueResponse.getAggregations().get("mean_monotonic")).getValue(); @@ -132,17 +129,15 @@ public void testRandomSamplerHistogram() { Map sampledDocCount = new HashMap<>(); for (int i = 0; i < NUM_SAMPLE_RUNS; i++) { - SearchRequest sampledRequest = client().prepareSearch("idx") - .addAggregation( - new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) - .subAggregation( - histogram("histo").field(NUMERIC_VALUE) - .interval(5.0) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - ) - ) - .request(); + SearchRequest sampledRequest = prepareSearch("idx").addAggregation( + new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) + .subAggregation( + histogram("histo").field(NUMERIC_VALUE) + .interval(5.0) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + ) + ).request(); InternalRandomSampler sampler = client().search(sampledRequest).actionGet().getAggregations().get("sampler"); Histogram histo = sampler.getAggregations().get("histo"); for (Histogram.Bucket bucket : histo.getBuckets()) { @@ -163,14 +158,12 @@ public void testRandomSamplerHistogram() { sampleMonotonicValue.put(key, sampleMonotonicValue.get(key) / NUM_SAMPLE_RUNS); } - SearchResponse trueValueResponse = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(NUMERIC_VALUE) - .interval(5.0) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - ) - .get(); + 
SearchResponse trueValueResponse = prepareSearch("idx").addAggregation( + histogram("histo").field(NUMERIC_VALUE) + .interval(5.0) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + ).get(); Histogram histogram = trueValueResponse.getAggregations().get("histo"); for (Histogram.Bucket bucket : histogram.getBuckets()) { long numDocs = bucket.getDocCount(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 95f60df960ab8..742d403ba42b0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -40,7 +40,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -133,16 +133,14 @@ public void setupSuiteScopeCluster() throws Exception { } public void testRangeAsSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").field(MULTI_VALUED_FIELD_NAME) - .size(100) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").field(MULTI_VALUED_FIELD_NAME) + .size(100) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) + ).get(); + + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getBuckets().size(), equalTo(numDocs + 1)); @@ -196,11 +194,11 @@ public void testRangeAsSubAggregation() throws Exception { } public void testSingleValueField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -237,11 +235,11 @@ public void testSingleValueField() throws Exception { } public void testSingleValueFieldWithFormat() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6).format("#")) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + 
range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6).format("#") + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -278,13 +276,11 @@ public void testSingleValueFieldWithFormat() throws Exception { } public void testSingleValueFieldWithCustomKey() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo("r1", 3).addRange("r2", 3, 6).addUnboundedFrom("r3", 6) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo("r1", 3).addRange("r2", 3, 6).addUnboundedFrom("r3", 6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -321,17 +317,15 @@ public void testSingleValueFieldWithCustomKey() throws Exception { } public void testSingleValuedFieldWithSubAggregation() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -393,17 +387,15 @@ public void testSingleValuedFieldWithSubAggregation() throws Exception { } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - range("range").field(SINGLE_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -453,11 +445,11 @@ public void testSingleValuedFieldWithValueScript() throws Exception { */ public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -507,17 +499,15 @@ public void testMultiValuedField() throws Exception { */ public void testMultiValuedFieldWithValueScript() throws Exception 
{ - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - range("range").field(MULTI_VALUED_FIELD_NAME) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) - .addUnboundedTo(3) - .addRange(3, 6) - .addUnboundedFrom(6) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) + .addUnboundedTo(3) + .addRange(3, 6) + .addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -577,11 +567,11 @@ public void testScriptSingleValue() throws Exception { "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -618,11 +608,11 @@ public void testScriptSingleValue() throws Exception { } public void testEmptyRange() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(-1).addUnboundedFrom(1000)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(-1).addUnboundedFrom(1000) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -651,7 +641,7 @@ public void testEmptyRange() throws Exception { public void testNoRangesInQuery() { try { - client().prepareSearch("idx").addAggregation(range("foobar").field(SINGLE_VALUED_FIELD_NAME)).get(); + prepareSearch("idx").addAggregation(range("foobar").field(SINGLE_VALUED_FIELD_NAME)).get(); fail(); } catch (SearchPhaseExecutionException spee) { Throwable rootCause = spee.getCause().getCause(); @@ -668,11 +658,11 @@ public void testScriptMultiValued() throws Exception { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation(range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").script(script).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -726,11 +716,11 @@ public void testScriptMultiValued() throws Exception { */ public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + 
assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -769,11 +759,11 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() throws Exception { clusterAdmin().prepareHealth("idx_unmapped").setWaitForYellowStatus().get(); - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6)) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + range("range").field(SINGLE_VALUED_FIELD_NAME).addUnboundedTo(3).addRange(3, 6).addUnboundedFrom(6) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -810,13 +800,11 @@ public void testPartiallyUnmapped() throws Exception { } public void testOverlappingRanges() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(5).addRange(3, 6).addRange(4, 5).addUnboundedFrom(4) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + range("range").field(MULTI_VALUED_FIELD_NAME).addUnboundedTo(5).addRange(3, 6).addRange(4, 5).addUnboundedFrom(4) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -862,8 +850,7 @@ public void testOverlappingRanges() throws Exception { } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field(SINGLE_VALUED_FIELD_NAME) .interval(1L) @@ -901,7 +888,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("i", "type=integer") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -922,15 +908,14 @@ public void testScriptCaching() throws Exception { // Test that a request using a nondeterministic script does not get cached Map params = new HashMap<>(); params.put("fieldname", "date"); - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( range("foo").field("i") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) .addRange(0, 10) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -942,15 +927,14 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( range("foo").field("i") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value + 1", Collections.emptyMap())) .addRange(0, 10) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( 
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -962,8 +946,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(range("foo").field("i").addRange(0, 10)).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(range("foo").field("i").addRange(0, 10)).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -976,11 +960,11 @@ public void testScriptCaching() throws Exception { } public void testFieldAlias() { - SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(range("range").field("route_length_miles").addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0)) - .get(); + SearchResponse response = prepareSearch("old_index", "new_index").addAggregation( + range("range").field("route_length_miles").addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -1005,13 +989,11 @@ public void testFieldAlias() { } public void testFieldAliasWithMissingValue() { - SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation( - range("range").field("route_length_miles").missing(0.0).addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0) - ) - .get(); + SearchResponse response = prepareSearch("old_index", "new_index").addAggregation( + range("range").field("route_length_miles").missing(0.0).addUnboundedTo(50.0).addRange(50.0, 150.0).addUnboundedFrom(150.0) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java index 5a8c560ad98f0..e90e73eec5bb3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java @@ -35,7 +35,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -144,20 +143,18 @@ private void insertIdx2(String[][] values) throws Exception { } public void testSimpleReverseNestedToRoot() throws Exception { - SearchResponse response = client().prepareSearch("idx1") - .addAggregation( - nested("nested1", "nested1").subAggregation( - terms("field2").field("nested1.field2") - .subAggregation( - reverseNested("nested1_to_field1").subAggregation( - terms("field1").field("field1").collectMode(randomFrom(SubAggCollectionMode.values())) - ) + 
SearchResponse response = prepareSearch("idx1").addAggregation( + nested("nested1", "nested1").subAggregation( + terms("field2").field("nested1.field2") + .subAggregation( + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("field1").collectMode(randomFrom(SubAggCollectionMode.values())) ) - ) + ) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Nested nested = response.getAggregations().get("nested1"); assertThat(nested, notNullValue()); @@ -331,15 +328,13 @@ public void testSimpleReverseNestedToRoot() throws Exception { } public void testSimpleNested1ToRootToNested2() throws Exception { - SearchResponse response = client().prepareSearch("idx2") - .addAggregation( - nested("nested1", "nested1").subAggregation( - reverseNested("nested1_to_root").subAggregation(nested("root_to_nested2", "nested1.nested2")) - ) + SearchResponse response = prepareSearch("idx2").addAggregation( + nested("nested1", "nested1").subAggregation( + reverseNested("nested1_to_root").subAggregation(nested("root_to_nested2", "nested1.nested2")) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Nested nested = response.getAggregations().get("nested1"); assertThat(nested.getName(), equalTo("nested1")); assertThat(nested.getDocCount(), equalTo(9L)); @@ -352,26 +347,24 @@ public void testSimpleNested1ToRootToNested2() throws Exception { } public void testSimpleReverseNestedToNested1() throws Exception { - SearchResponse response = client().prepareSearch("idx2") - .addAggregation( - nested("nested1", "nested1.nested2").subAggregation( - terms("field2").field("nested1.nested2.field2") - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .size(10000) - .subAggregation( - reverseNested("nested1_to_field1").path("nested1") - .subAggregation( - terms("field1").field("nested1.field1") - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - ) + SearchResponse response = prepareSearch("idx2").addAggregation( + nested("nested1", "nested1.nested2").subAggregation( + terms("field2").field("nested1.nested2.field2") + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .size(10000) + .subAggregation( + reverseNested("nested1_to_field1").path("nested1") + .subAggregation( + terms("field1").field("nested1.field1") + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Nested nested = response.getAggregations().get("nested1"); assertThat(nested, notNullValue()); @@ -458,17 +451,15 @@ public void testSimpleReverseNestedToNested1() throws Exception { public void testReverseNestedAggWithoutNestedAgg() { try { - client().prepareSearch("idx2") - .addAggregation( - terms("field2").field("nested1.nested2.field2") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation( - reverseNested("nested1_to_field1").subAggregation( - terms("field1").field("nested1.field1").collectMode(randomFrom(SubAggCollectionMode.values())) - ) + prepareSearch("idx2").addAggregation( + terms("field2").field("nested1.nested2.field2") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation( + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("nested1.field1").collectMode(randomFrom(SubAggCollectionMode.values())) ) - ) - .get(); + ) + ).get(); fail("Expected 
SearchPhaseExecutionException"); } catch (SearchPhaseExecutionException e) { assertThat(e.getMessage(), is("all shards failed")); @@ -476,8 +467,7 @@ public void testReverseNestedAggWithoutNestedAgg() { } public void testNonExistingNestedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx2") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx2").setQuery(matchAllQuery()) .addAggregation(nested("nested2", "nested1.nested2").subAggregation(reverseNested("incorrect").path("nested3"))) .get(); @@ -489,8 +479,7 @@ public void testNonExistingNestedField() throws Exception { assertThat(reverseNested.getDocCount(), is(0L)); // Test that parsing the reverse_nested agg doesn't fail, because the parent nested agg is unmapped: - searchResponse = client().prepareSearch("idx1") - .setQuery(matchAllQuery()) + searchResponse = prepareSearch("idx1").setQuery(matchAllQuery()) .addAggregation(nested("incorrect1", "incorrect1").subAggregation(reverseNested("incorrect2").path("incorrect2"))) .get(); @@ -614,22 +603,20 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("idx3") - .addAggregation( - nested("nested_0", "category").subAggregation( - terms("group_by_category").field("category.name") - .subAggregation( - reverseNested("to_root").subAggregation( - nested("nested_1", "sku").subAggregation( - filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( - count("sku_count").field("sku.sku_type") - ) + SearchResponse response = prepareSearch("idx3").addAggregation( + nested("nested_0", "category").subAggregation( + terms("group_by_category").field("category.name") + .subAggregation( + reverseNested("to_root").subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( + count("sku_count").field("sku.sku_type") ) ) ) - ) + ) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -651,27 +638,25 @@ public void testSameParentDocHavingMultipleBuckets() throws Exception { assertThat(barCount.getValue(), equalTo(3L)); } - response = client().prepareSearch("idx3") - .addAggregation( - nested("nested_0", "category").subAggregation( - terms("group_by_category").field("category.name") - .subAggregation( - reverseNested("to_root").subAggregation( - nested("nested_1", "sku").subAggregation( - filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( - nested("nested_2", "sku.colors").subAggregation( - filter("filter_sku_color", termQuery("sku.colors.name", "red")).subAggregation( - reverseNested("reverse_to_sku").path("sku") - .subAggregation(count("sku_count").field("sku.sku_type")) - ) + response = prepareSearch("idx3").addAggregation( + nested("nested_0", "category").subAggregation( + terms("group_by_category").field("category.name") + .subAggregation( + reverseNested("to_root").subAggregation( + nested("nested_1", "sku").subAggregation( + filter("filter_by_sku", termQuery("sku.sku_type", "bar1")).subAggregation( + nested("nested_2", "sku.colors").subAggregation( + filter("filter_sku_color", termQuery("sku.colors.name", "red")).subAggregation( + reverseNested("reverse_to_sku").path("sku") + .subAggregation(count("sku_count").field("sku.sku_type")) ) ) ) ) ) - ) + ) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -701,20 +686,18 @@ public void testSameParentDocHavingMultipleBuckets() throws 
Exception { } public void testFieldAlias() { - SearchResponse response = client().prepareSearch("idx1") - .addAggregation( - nested("nested1", "nested1").subAggregation( - terms("field2").field("nested1.field2") - .subAggregation( - reverseNested("nested1_to_field1").subAggregation( - terms("field1").field("alias").collectMode(randomFrom(SubAggCollectionMode.values())) - ) + SearchResponse response = prepareSearch("idx1").addAggregation( + nested("nested1", "nested1").subAggregation( + terms("field2").field("nested1.field2") + .subAggregation( + reverseNested("nested1_to_field1").subAggregation( + terms("field1").field("alias").collectMode(randomFrom(SubAggCollectionMode.values())) ) - ) + ) ) - .get(); + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Nested nested = response.getAggregations().get("nested1"); Terms nestedTerms = nested.getAggregations().get("field2"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java index a9604ce1c62f3..f6d7d37a29136 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java @@ -26,7 +26,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -91,15 +91,14 @@ public void testIssue10719() throws Exception { // Tests that we can refer to nested elements under a sample in a path // statement boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .addAggregation( terms("genres").field("genre") .order(BucketOrder.aggregation("sample>max_price.value", asc)) .subAggregation(sampler("sample").shardSize(100).subAggregation(max("max_price").field("price"))) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms genres = response.getAggregations().get("genres"); List genreBuckets = genres.getBuckets(); // For this test to be useful we need >1 genre bucket to compare @@ -122,14 +121,13 @@ public void testIssue10719() throws Exception { public void testSimpleSampler() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); Terms authors = sample.getAggregations().get("authors"); List testBuckets = 
authors.getBuckets(); @@ -144,14 +142,13 @@ public void testSimpleSampler() throws Exception { public void testUnmappedChildAggNoDiversity() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("idx_unmapped").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), equalTo(0L)); Terms authors = sample.getAggregations().get("authors"); @@ -161,15 +158,14 @@ public void testUnmappedChildAggNoDiversity() throws Exception { public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(100); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("idx_unmapped", "test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("idx_unmapped", "test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .setExplain(true) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); Sampler sample = response.getAggregations().get("sample"); assertThat(sample.getDocCount(), greaterThan(0L)); Terms authors = sample.getAggregations().get("authors"); @@ -179,13 +175,12 @@ public void testPartiallyUnmappedChildAggNoDiversity() throws Exception { public void testRidiculousShardSizeSampler() throws Exception { SamplerAggregationBuilder sampleAgg = sampler("sample").shardSize(Integer.MAX_VALUE); sampleAgg.subAggregation(terms("authors").field("author")); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(new TermQueryBuilder("genre", "fantasy")) .setFrom(0) .setSize(60) .addAggregation(sampleAgg) .get(); - assertSearchResponse(response); + assertNoFailures(response); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java index 1dba19319efc2..b0f9556bc842b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardReduceIT.java @@ -36,7 +36,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -87,8 +87,7 @@ public void setupSuiteScopeCluster() throws Exception { } public void testGlobal() throws 
Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( global("global").subAggregation( dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) @@ -96,7 +95,7 @@ public void testGlobal() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Global global = response.getAggregations().get("global"); Histogram histo = global.getAggregations().get("histo"); @@ -104,8 +103,7 @@ public void testGlobal() throws Exception { } public void testFilter() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( filter("filter", QueryBuilders.matchAllQuery()).subAggregation( dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) @@ -113,7 +111,7 @@ public void testFilter() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Filter filter = response.getAggregations().get("filter"); Histogram histo = filter.getAggregations().get("histo"); @@ -121,15 +119,14 @@ public void testFilter() throws Exception { } public void testMissing() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( missing("missing").field("foobar") .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Missing missing = response.getAggregations().get("missing"); Histogram histo = missing.getAggregations().get("histo"); @@ -137,8 +134,7 @@ public void testMissing() throws Exception { } public void testGlobalWithFilterWithMissing() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( global("global").subAggregation( filter("filter", QueryBuilders.matchAllQuery()).subAggregation( @@ -149,7 +145,7 @@ public void testGlobalWithFilterWithMissing() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Global global = response.getAggregations().get("global"); Filter filter = global.getAggregations().get("filter"); @@ -159,8 +155,7 @@ public void testGlobalWithFilterWithMissing() throws Exception { } public void testNested() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( nested("nested", "nested").subAggregation( dateHistogram("histo").field("nested.date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0) @@ -168,7 +163,7 @@ public void testNested() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Nested nested = response.getAggregations().get("nested"); Histogram histo = nested.getAggregations().get("histo"); @@ -176,8 +171,7 @@ public void testNested() throws Exception { } public void testStringTerms() throws Exception { - 
SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( terms("terms").field("term-s") .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -185,7 +179,7 @@ public void testStringTerms() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); Histogram histo = terms.getBucketByKey("term").getAggregations().get("histo"); @@ -193,8 +187,7 @@ public void testStringTerms() throws Exception { } public void testLongTerms() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( terms("terms").field("term-l") .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -202,7 +195,7 @@ public void testLongTerms() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); Histogram histo = terms.getBucketByKey("1").getAggregations().get("histo"); @@ -210,8 +203,7 @@ public void testLongTerms() throws Exception { } public void testDoubleTerms() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( terms("terms").field("term-d") .collectMode(randomFrom(SubAggCollectionMode.values())) @@ -219,7 +211,7 @@ public void testDoubleTerms() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); Histogram histo = terms.getBucketByKey("1.5").getAggregations().get("histo"); @@ -227,8 +219,7 @@ public void testDoubleTerms() throws Exception { } public void testRange() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( range("range").field("value") .addRange("r1", 0, 10) @@ -236,7 +227,7 @@ public void testRange() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); Histogram histo = range.getBuckets().get(0).getAggregations().get("histo"); @@ -244,8 +235,7 @@ public void testRange() throws Exception { } public void testDateRange() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( dateRange("range").field("date") .addRange("r1", "2014-01-01", "2014-01-10") @@ -253,7 +243,7 @@ public void testDateRange() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); Histogram histo = range.getBuckets().get(0).getAggregations().get("histo"); @@ -261,8 +251,7 @@ public void testDateRange() throws Exception { } public void testIpRange() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) 
.addAggregation( ipRange("range").field("ip") .addRange("r1", "10.0.0.1", "10.0.0.10") @@ -270,7 +259,7 @@ public void testIpRange() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); Histogram histo = range.getBuckets().get(0).getAggregations().get("histo"); @@ -278,8 +267,7 @@ public void testIpRange() throws Exception { } public void testHistogram() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( histogram("topHisto").field("value") .interval(5) @@ -287,7 +275,7 @@ public void testHistogram() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram topHisto = response.getAggregations().get("topHisto"); Histogram histo = topHisto.getBuckets().get(0).getAggregations().get("histo"); @@ -295,8 +283,7 @@ public void testHistogram() throws Exception { } public void testDateHistogram() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( dateHistogram("topHisto").field("date") .calendarInterval(DateHistogramInterval.MONTH) @@ -304,7 +291,7 @@ public void testDateHistogram() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram topHisto = response.getAggregations().get("topHisto"); Histogram histo = topHisto.getBuckets().iterator().next().getAggregations().get("histo"); @@ -313,15 +300,14 @@ public void testDateHistogram() throws Exception { } public void testGeoHashGrid() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( geohashGrid("grid").field("location") .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid grid = response.getAggregations().get("grid"); Histogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo"); @@ -329,15 +315,14 @@ public void testGeoHashGrid() throws Exception { } public void testGeoTileGrid() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.matchAllQuery()) .addAggregation( geotileGrid("grid").field("location") .subAggregation(dateHistogram("histo").field("date").fixedInterval(DateHistogramInterval.DAY).minDocCount(0)) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); GeoGrid grid = response.getAggregations().get("grid"); Histogram histo = grid.getBuckets().iterator().next().getAggregations().get("histo"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java index dc3cd1c897780..2c0c7766b646c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -26,8 +26,7 @@ public void testNoShardSizeString() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) ) @@ -50,8 +49,7 @@ public void testShardSizeEqualsSizeString() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -79,8 +77,7 @@ public void testWithShardSizeString() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -108,8 +105,7 @@ public void testWithShardSizeStringSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setRouting(routing1) + SearchResponse response = prepareSearch("idx").setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -137,8 +133,7 @@ public void testNoShardSizeTermOrderString() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) ) @@ -161,8 +156,7 @@ public void testNoShardSizeLong() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) ) @@ -185,8 +179,7 @@ public void testShardSizeEqualsSizeLong() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -213,8 +206,7 @@ public void testWithShardSizeLong() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -242,8 +234,7 @@ public void testWithShardSizeLongSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setRouting(routing1) + SearchResponse response = prepareSearch("idx").setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -271,8 +262,7 @@ public void testNoShardSizeTermOrderLong() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) ) @@ -295,8 
+285,7 @@ public void testNoShardSizeDouble() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)) ) @@ -319,8 +308,7 @@ public void testShardSizeEqualsSizeDouble() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -347,8 +335,7 @@ public void testWithShardSizeDouble() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") .size(3) @@ -375,8 +362,7 @@ public void testWithShardSizeDoubleSingleShard() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setRouting(routing1) + SearchResponse response = prepareSearch("idx").setRouting(routing1) .setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key") @@ -404,8 +390,7 @@ public void testNoShardSizeTermOrderDouble() throws Exception { indexData(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)) ) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index c8d89785fc4af..4d94173f8d978 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -57,7 +57,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.significantText; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -121,16 +121,18 @@ public void testXContentResponse() throws Exception { if ("text".equals(type) && randomBoolean()) { // Use significant_text on text fields but occasionally run with alternative of // significant_terms on legacy fieldData=true too. 
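The recurring replacement in the hunks above, `client().prepareSearch(...)` becoming a bare `prepareSearch(...)`, assumes a convenience method on the shared integration-test base class. A minimal sketch of what such a helper plausibly looks like, assuming it simply delegates to the framework-managed client (the real method ships with the Elasticsearch test framework; the body below is inferred from how the call sites change, not copied from it, and the class/method names are hypothetical):

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.test.ESIntegTestCase;

// Illustrative sketch only: a delegating helper equivalent to what these
// call sites now rely on. ESIntegTestCase.client() is the test cluster's
// client; the hypothetical method name avoids clashing with the real one.
public class PrepareSearchSketch extends ESIntegTestCase {
    protected static SearchRequestBuilder prepareSearchSketch(String... indices) {
        // Delegate to the test client, exactly as the old inline
        // client().prepareSearch(...) call sites did.
        return client().prepareSearch(indices);
    }
}
```

Call sites then read `prepareSearch("idx").setQuery(...)` instead of threading `client()` through every builder chain, which is the bulk of the mechanical change in these hunks.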
- request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD))); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD)) + ); } else { - request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)) + ); } SearchResponse response = request.get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { @@ -226,13 +228,13 @@ public void testPopularTermManyDeletedDocs() throws Exception { SearchRequestBuilder request; if (randomBoolean()) { - request = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1)) - ); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1)) + ); } else { - request = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1))); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1)) + ); } request.get(); @@ -266,39 +268,36 @@ public void testBackgroundVsSeparateSet( final boolean useSigText = randomBoolean() && type.equals("text"); SearchRequestBuilder request1; if (useSigText) { - request1 = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class").field(CLASS_FIELD) - .subAggregation( - significantText("sig_terms", TEXT_FIELD).minDocCount(1) - .significanceHeuristic(significanceHeuristicExpectingSuperset) - ) - ); + request1 = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantText("sig_terms", TEXT_FIELD).minDocCount(1) + .significanceHeuristic(significanceHeuristicExpectingSuperset) + ) + ); } else { - request1 = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class").field(CLASS_FIELD) - .subAggregation( - significantTerms("sig_terms").field(TEXT_FIELD) - .minDocCount(1) - .significanceHeuristic(significanceHeuristicExpectingSuperset) - ) - ); + request1 = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantTerms("sig_terms").field(TEXT_FIELD) + .minDocCount(1) + .significanceHeuristic(significanceHeuristicExpectingSuperset) + ) + ); } SearchResponse response1 = request1.get(); - assertSearchResponse(response1); + assertNoFailures(response1); SearchRequestBuilder request2; if (useSigText) { - request2 = client().prepareSearch(INDEX_NAME) - .addAggregation( - filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( - significantText("sig_terms", TEXT_FIELD).minDocCount(1) - .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets) - ) + request2 = prepareSearch(INDEX_NAME).addAggregation( + filter("0", 
QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( + significantText("sig_terms", TEXT_FIELD).minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) ) + ) .addAggregation( filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")).subAggregation( significantText("sig_terms", TEXT_FIELD).minDocCount(1) @@ -307,15 +306,14 @@ public void testBackgroundVsSeparateSet( ) ); } else { - request2 = client().prepareSearch(INDEX_NAME) - .addAggregation( - filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( - significantTerms("sig_terms").field(TEXT_FIELD) - .minDocCount(1) - .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) - .significanceHeuristic(significanceHeuristicExpectingSeparateSets) - ) + request2 = prepareSearch(INDEX_NAME).addAggregation( + filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation( + significantTerms("sig_terms").field(TEXT_FIELD) + .minDocCount(1) + .backgroundFilter(QueryBuilders.termQuery(CLASS_FIELD, "1")) + .significanceHeuristic(significanceHeuristicExpectingSeparateSets) ) + ) .addAggregation( filter("1", QueryBuilders.termQuery(CLASS_FIELD, "1")).subAggregation( significantTerms("sig_terms").field(TEXT_FIELD) @@ -365,34 +363,32 @@ public void testScoresEqualForPositiveAndNegative(SignificanceHeuristic heuristi // check that results for both classes are the same with exclude negatives = false and classes are routing ids SearchRequestBuilder request; if (randomBoolean()) { - request = client().prepareSearch("test") - .addAggregation( - terms("class").field("class") - .subAggregation( - significantTerms("mySignificantTerms").field("text") - .executionHint(randomExecutionHint()) - .significanceHeuristic(heuristic) - .minDocCount(1) - .shardSize(1000) - .size(1000) - ) - ); + request = prepareSearch("test").addAggregation( + terms("class").field("class") + .subAggregation( + significantTerms("mySignificantTerms").field("text") + .executionHint(randomExecutionHint()) + .significanceHeuristic(heuristic) + .minDocCount(1) + .shardSize(1000) + .size(1000) + ) + ); } else { - request = client().prepareSearch("test") - .addAggregation( - terms("class").field("class") - .subAggregation( - significantText("mySignificantTerms", "text").significanceHeuristic(heuristic) - .minDocCount(1) - .shardSize(1000) - .size(1000) - ) - ); + request = prepareSearch("test").addAggregation( + terms("class").field("class") + .subAggregation( + significantText("mySignificantTerms", "text").significanceHeuristic(heuristic) + .minDocCount(1) + .shardSize(1000) + .size(1000) + ) + ); } SearchResponse response = request.get(); - assertSearchResponse(response); + assertNoFailures(response); - assertSearchResponse(response); + assertNoFailures(response); StringTerms classes = response.getAggregations().get("class"); assertThat(classes.getBuckets().size(), equalTo(2)); Iterator classBuckets = classes.getBuckets().iterator(); @@ -427,8 +423,8 @@ public void testSubAggregations() throws Exception { .size(1000) .subAggregation(subAgg); - SearchResponse response = client().prepareSearch("test").setQuery(query).addAggregation(agg).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").setQuery(query).addAggregation(agg).get(); + assertNoFailures(response); SignificantTerms sigTerms = response.getAggregations().get("significant_terms"); assertThat(sigTerms.getBuckets().size(), equalTo(2)); @@ -479,32 +475,30 @@ public void 
testScriptScore() throws ExecutionException, InterruptedException, I SearchRequestBuilder request; if ("text".equals(type) && randomBoolean()) { - request = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class").field(CLASS_FIELD) - .subAggregation( - significantText("mySignificantTerms", TEXT_FIELD).significanceHeuristic(scriptHeuristic) - .minDocCount(1) - .shardSize(2) - .size(2) - ) - ); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantText("mySignificantTerms", TEXT_FIELD).significanceHeuristic(scriptHeuristic) + .minDocCount(1) + .shardSize(2) + .size(2) + ) + ); } else { - request = client().prepareSearch(INDEX_NAME) - .addAggregation( - terms("class").field(CLASS_FIELD) - .subAggregation( - significantTerms("mySignificantTerms").field(TEXT_FIELD) - .executionHint(randomExecutionHint()) - .significanceHeuristic(scriptHeuristic) - .minDocCount(1) - .shardSize(2) - .size(2) - ) - ); + request = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD) + .subAggregation( + significantTerms("mySignificantTerms").field(TEXT_FIELD) + .executionHint(randomExecutionHint()) + .significanceHeuristic(scriptHeuristic) + .minDocCount(1) + .shardSize(2) + .size(2) + ) + ); } SearchResponse response = request.get(); - assertSearchResponse(response); + assertNoFailures(response); for (Terms.Bucket classBucket : ((Terms) response.getAggregations().get("class")).getBuckets()) { SignificantTerms sigTerms = classBucket.getAggregations().get("mySignificantTerms"); for (SignificantTerms.Bucket bucket : sigTerms.getBuckets()) { @@ -563,7 +557,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("s", "type=long", "t", "type=text") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -588,17 +581,15 @@ public void testScriptCaching() throws Exception { boolean useSigText = randomBoolean(); SearchResponse r; if (useSigText) { - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)) .get(); } else { - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)) .get(); } - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -613,17 +604,15 @@ public void testScriptCaching() throws Exception { scriptHeuristic = getScriptSignificanceHeuristic(); useSigText = randomBoolean(); if (useSigText) { - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(significantText("foo", "t").significanceHeuristic(scriptHeuristic)) .get(); } else { - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(significantTerms("foo").field("s").significanceHeuristic(scriptHeuristic)) .get(); } - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -636,11 +625,11 @@ public void testScriptCaching() throws 
Exception { // Ensure that non-scripted requests are cached as normal if (useSigText) { - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(significantText("foo", "t")).get(); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(significantText("foo", "t")).get(); } else { - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(significantTerms("foo").field("s")).get(); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(significantTerms("foo").field("s")).get(); } - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java index 46c2c693132cc..58609df7ae8fe 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -28,7 +28,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -267,31 +267,27 @@ private void assertUnboundedDocCountError(int size, SearchResponse accurateRespo public void testStringValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertDocCountErrorWithinBounds(size, accurateResponse, testResponse); } @@ -299,31 +295,27 @@ public void testStringValueField() throws Exception { 
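The parallel import swap, `assertSearchResponse` for `assertNoFailures`, preserves the assertions' intent: the response must have completed without shard-level failures. A reduced sketch of what such a check amounts to (hedged; the real `ElasticsearchAssertions.assertNoFailures` also reports richer per-shard diagnostics):

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;

import static org.junit.Assert.fail;

// Reduced sketch of the no-failures check; simplified, not the real helper.
public final class NoFailuresSketch {
    public static void assertNoFailures(SearchResponse response) {
        // A response is "clean" when no shard reported a failure.
        ShardSearchFailure[] failures = response.getShardFailures();
        if (failures.length > 0) {
            fail("expected no shard failures, got: " + failures[0].reason());
        }
    }
}
```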
public void testStringValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -332,8 +324,7 @@ public void testStringValueFieldWithRouting() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) + SearchResponse testResponse = prepareSearch("idx_with_routing").setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(STRING_FIELD_NAME) @@ -344,7 +335,7 @@ public void testStringValueFieldWithRouting() throws Exception { ) .get(); - assertSearchResponse(testResponse); + assertNoFailures(testResponse); assertNoDocCountErrorSingleResponse(size, testResponse); } @@ -352,33 +343,29 @@ public void testStringValueFieldWithRouting() throws Exception { public void testStringValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + 
.field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -386,33 +373,29 @@ public void testStringValueFieldDocCountAsc() throws Exception { public void testStringValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -420,33 +403,29 @@ public void testStringValueFieldTermSortAsc() throws Exception { public void testStringValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse 
accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -454,35 +433,31 @@ public void testStringValueFieldTermSortDesc() throws Exception { public void testStringValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -490,35 +465,31 @@ public void testStringValueFieldSubAggAsc() throws Exception { public void testStringValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - 
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -526,31 +497,27 @@ public void testStringValueFieldSubAggDesc() throws Exception { public void testLongValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertDocCountErrorWithinBounds(size, accurateResponse, testResponse); } @@ -558,31 +525,27 @@ public void testLongValueField() throws Exception { public void testLongValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - 
terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -591,8 +554,7 @@ public void testLongValueFieldWithRouting() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) + SearchResponse testResponse = prepareSearch("idx_with_routing").setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(LONG_FIELD_NAME) @@ -603,7 +565,7 @@ public void testLongValueFieldWithRouting() throws Exception { ) .get(); - assertSearchResponse(testResponse); + assertNoFailures(testResponse); assertNoDocCountErrorSingleResponse(size, testResponse); } @@ -611,33 +573,29 @@ public void testLongValueFieldWithRouting() throws Exception { public void testLongValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = 
prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -645,33 +603,29 @@ public void testLongValueFieldDocCountAsc() throws Exception { public void testLongValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -679,33 +633,29 @@ public void testLongValueFieldTermSortAsc() throws Exception { public void testLongValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(false)) + 
.collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -713,35 +663,31 @@ public void testLongValueFieldTermSortDesc() throws Exception { public void testLongValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -749,35 +695,31 @@ public void testLongValueFieldSubAggAsc() throws Exception { public void testLongValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(LONG_FIELD_NAME) - 
.showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(LONG_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -785,31 +727,27 @@ public void testLongValueFieldSubAggDesc() throws Exception { public void testDoubleValueField() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertDocCountErrorWithinBounds(size, accurateResponse, testResponse); } @@ -817,31 +755,27 @@ public void testDoubleValueField() throws Exception { public void testDoubleValueFieldSingleShard() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = 
client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -850,8 +784,7 @@ public void testDoubleValueFieldWithRouting() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse testResponse = client().prepareSearch("idx_with_routing") - .setRouting(String.valueOf(between(1, numRoutingValues))) + SearchResponse testResponse = prepareSearch("idx_with_routing").setRouting(String.valueOf(between(1, numRoutingValues))) .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(DOUBLE_FIELD_NAME) @@ -862,7 +795,7 @@ public void testDoubleValueFieldWithRouting() throws Exception { ) .get(); - assertSearchResponse(testResponse); + assertNoFailures(testResponse); assertNoDocCountErrorSingleResponse(size, testResponse); } @@ -870,33 +803,29 @@ public void testDoubleValueFieldWithRouting() throws Exception { public void testDoubleValueFieldDocCountAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.count(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.count(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + 
).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -904,33 +833,29 @@ public void testDoubleValueFieldDocCountAsc() throws Exception { public void testDoubleValueFieldTermSortAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.key(true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -938,33 +863,29 @@ public void testDoubleValueFieldTermSortAsc() throws Exception { public void testDoubleValueFieldTermSortDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.key(false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + 
.size(size) + .shardSize(shardSize) + .order(BucketOrder.key(false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + + assertNoFailures(testResponse); assertNoDocCountError(size, accurateResponse, testResponse); } @@ -972,35 +893,31 @@ public void testDoubleValueFieldTermSortDesc() throws Exception { public void testDoubleValueFieldSubAggAsc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", true)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); + SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", true)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -1008,35 +925,31 @@ public void testDoubleValueFieldSubAggAsc() throws Exception { public void testDoubleValueFieldSubAggDesc() throws Exception { int size = randomIntBetween(1, 20); int shardSize = randomIntBetween(size, size * 2); - SearchResponse accurateResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(10000) - .shardSize(10000) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(accurateResponse); - - SearchResponse testResponse = client().prepareSearch("idx_single_shard") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(DOUBLE_FIELD_NAME) - .showTermDocCountError(true) - .size(size) - .shardSize(shardSize) - .order(BucketOrder.aggregation("sortAgg", false)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) - ) - .get(); - - assertSearchResponse(testResponse); 
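Every `TermsDocCountErrorIT` hunk in this stretch follows one template: an exhaustive baseline request whose `size`/`shardSize` of 10000 exceeds the term cardinality (so its bucket counts are exact), then a deliberately truncated request whose reported `docCountError` is checked against that baseline. A compressed sketch of the template, assuming it runs inside an `ESIntegTestCase` subclass against a populated `idx` index with a keyword field `s` (index and field names here are hypothetical):

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;

public class DocCountErrorPatternSketch extends ESIntegTestCase {
    public void testBaselineVersusTruncated() {
        // Baseline: size/shardSize large enough that every term is returned,
        // so per-bucket doc counts carry no truncation error.
        SearchResponse accurate = prepareSearch("idx").addAggregation(
            terms("terms").field("s").showTermDocCountError(true).size(10000).shardSize(10000)
        ).get();
        // Candidate: small size/shardSize force shards to drop terms, so the
        // response must report a bounded docCountError instead.
        SearchResponse truncated = prepareSearch("idx").addAggregation(
            terms("terms").field("s").showTermDocCountError(true).size(5).shardSize(10)
        ).get();
        // The assertions in the diff then compare the two: counts in the
        // truncated response may differ from the baseline by at most the
        // reported error (assertDocCountErrorWithinBounds and friends).
    }
}
```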
+ SearchResponse accurateResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(10000) + .shardSize(10000) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(accurateResponse); + + SearchResponse testResponse = prepareSearch("idx_single_shard").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(DOUBLE_FIELD_NAME) + .showTermDocCountError(true) + .size(size) + .shardSize(shardSize) + .order(BucketOrder.aggregation("sortAgg", false)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)) + ).get(); + + assertNoFailures(testResponse); assertUnboundedDocCountError(size, accurateResponse, testResponse); } @@ -1047,17 +960,15 @@ public void testDoubleValueFieldSubAggDesc() throws Exception { * 3 one-shard indices. */ public void testFixedDocs() throws Exception { - SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(5) - .shardSize(5) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(5) + .shardSize(5) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1102,17 +1013,15 @@ public void testFixedDocs() throws Exception { * See https://github.com/elastic/elasticsearch/issues/40005 for more details */ public void testIncrementalReduction() { - SearchResponse response = client().prepareSearch("idx_fixed_docs_3", "idx_fixed_docs_4", "idx_fixed_docs_5") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(STRING_FIELD_NAME) - .showTermDocCountError(true) - .size(5) - .shardSize(5) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx_fixed_docs_3", "idx_fixed_docs_4", "idx_fixed_docs_5").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(STRING_FIELD_NAME) + .showTermDocCountError(true) + .size(5) + .shardSize(5) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms.getDocCountError(), equalTo(0L)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 85cc185d1f558..ffb9539bee735 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -25,7 +25,7 @@ import static 
org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; public class TermsShardMinDocCountIT extends ESIntegTestCase { @@ -61,35 +61,31 @@ public void testShardMinDocCountSignificantTermsTest() throws Exception { indexRandom(true, false, indexBuilders); // first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned - SearchResponse response = client().prepareSearch(index) - .addAggregation( - (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( - significantTerms("mySignificantTerms").field("text") - .minDocCount(2) - .size(2) - .shardSize(2) - .executionHint(randomExecutionHint()) - ) + SearchResponse response = prepareSearch(index).addAggregation( + (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( + significantTerms("mySignificantTerms").field("text") + .minDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) ) - .get(); - assertSearchResponse(response); + ).get(); + assertNoFailures(response); InternalFilter filteredBucket = response.getAggregations().get("inclass"); SignificantTerms sigterms = filteredBucket.getAggregations().get("mySignificantTerms"); assertThat(sigterms.getBuckets().size(), equalTo(0)); - response = client().prepareSearch(index) - .addAggregation( - (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( - significantTerms("mySignificantTerms").field("text") - .minDocCount(2) - .shardSize(2) - .shardMinDocCount(2) - .size(2) - .executionHint(randomExecutionHint()) - ) + response = prepareSearch(index).addAggregation( + (filter("inclass", QueryBuilders.termQuery("class", true))).subAggregation( + significantTerms("mySignificantTerms").field("text") + .minDocCount(2) + .shardSize(2) + .shardMinDocCount(2) + .size(2) + .executionHint(randomExecutionHint()) ) - .get(); - assertSearchResponse(response); + ).get(); + assertNoFailures(response); filteredBucket = response.getAggregations().get("inclass"); sigterms = filteredBucket.getAggregations().get("mySignificantTerms"); assertThat(sigterms.getBuckets().size(), equalTo(2)); @@ -126,32 +122,28 @@ public void testShardMinDocCountTermsTest() throws Exception { indexRandom(true, false, indexBuilders); // first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned - SearchResponse response = client().prepareSearch(index) - .addAggregation( - terms("myTerms").field("text") - .minDocCount(2) - .size(2) - .shardSize(2) - .executionHint(randomExecutionHint()) - .order(BucketOrder.key(true)) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch(index).addAggregation( + terms("myTerms").field("text") + .minDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) + .order(BucketOrder.key(true)) + ).get(); + assertNoFailures(response); Terms sigterms = response.getAggregations().get("myTerms"); assertThat(sigterms.getBuckets().size(), equalTo(0)); - response = client().prepareSearch(index) - .addAggregation( - terms("myTerms").field("text") - .minDocCount(2) - .shardMinDocCount(2) - .size(2) - .shardSize(2) - 
.executionHint(randomExecutionHint()) - .order(BucketOrder.key(true)) - ) - .get(); - assertSearchResponse(response); + response = prepareSearch(index).addAggregation( + terms("myTerms").field("text") + .minDocCount(2) + .shardMinDocCount(2) + .size(2) + .shardSize(2) + .executionHint(randomExecutionHint()) + .order(BucketOrder.key(true)) + ).get(); + assertNoFailures(response); sigterms = response.getAggregations().get("myTerms"); assertThat(sigterms.getBuckets().size(), equalTo(2)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java index 60af5cfad0004..ceafd07c67d65 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -58,7 +58,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.stats; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -122,7 +122,6 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked( indicesAdmin().prepareCreate("idx") .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() ); List<IndexRequestBuilder> builders = new ArrayList<>(); for (int i = 0; i < 5; i++) { @@ -148,7 +147,6 @@ public void setupSuiteScopeCluster() throws Exception { assertAcked( indicesAdmin().prepareCreate("high_card_idx") .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() ); for (int i = 0; i < 100; i++) { builders.add( @@ -226,7 +224,6 @@ private void getMultiSortDocs(List<IndexRequestBuilder> builders) throws IOExcep assertAcked( indicesAdmin().prepareCreate("sort_idx") .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() ); for (int i = 1; i <= 3; i++) { builders.add( @@ -277,15 +274,13 @@ public void testSizeIsZero() { final int minDocCount = randomInt(1); IllegalArgumentException exception = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("high_card_idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .minDocCount(minDocCount) - .size(0) - ) - .get() + () -> prepareSearch("high_card_idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .minDocCount(minDocCount) + .size(0) + ).get() ); assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [terms]")); } @@ -300,12 +295,10 @@ public void testMultiValueFieldWithPartitionedFiltering() throws Exception { private void runTestFieldWithPartitionedFiltering(String field) throws Exception { // Find total number of unique terms - SearchResponse allResponse = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(allResponse); + SearchResponse allResponse = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(allResponse); StringTerms terms = allResponse.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -315,14 +308,12 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception final int numPartitions = randomIntBetween(2, 4); Set foundTerms = new HashSet<>(); for (int partition = 0; partition < numPartitions; partition++) { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").field(field) - .includeExclude(new IncludeExclude(partition, numPartitions)) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").field(field) + .includeExclude(new IncludeExclude(partition, numPartitions)) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); + assertNoFailures(response); terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -334,16 +325,14 @@ private void runTestFieldWithPartitionedFiltering(String field) throws Exception } public void testSingleValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -359,16 +348,14 @@ public void testSingleValuedFieldWithValueScript() throws Exception { } public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value.substring(0,3)", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new 
TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value.substring(0,3)", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -382,22 +369,15 @@ public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception { } public void testMultiValuedScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .script( - new Script( - ScriptType.INLINE, - CustomScriptPlugin.NAME, - "doc['" + MULTI_VALUED_FIELD_NAME + "']", - Collections.emptyMap() - ) - ) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .script( + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + MULTI_VALUED_FIELD_NAME + "']", Collections.emptyMap()) + ) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -417,16 +397,14 @@ public void testMultiValuedScript() throws Exception { } public void testMultiValuedFieldWithValueScript() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(MULTI_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -465,15 +443,13 @@ public void testScriptSingleValue() throws Exception { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .script(script) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script(script) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -496,15 +472,13 @@ public void testScriptSingleValueExplicitSingleValue() throws Exception { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - 
.executionHint(randomExecutionHint()) - .script(script) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .script(script) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -520,7 +494,7 @@ public void testScriptSingleValueExplicitSingleValue() throws Exception { } public void testScriptMultiValued() throws Exception { - SearchResponse response = client().prepareSearch("idx") + SearchResponse response = prepareSearch("idx") .addAggregation( new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) @@ -536,7 +510,7 @@ public void testScriptMultiValued() throws Exception { ) .get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -556,15 +530,13 @@ public void testScriptMultiValued() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -581,14 +553,11 @@ public void testPartiallyUnmapped() throws Exception { public void testStringTermsNestedIntoPerBucketAggregator() throws Exception { // no execution hint so that the logic that decides whether or not to use ordinals is executed - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( - new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation( + new TermsAggregationBuilder("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())) ) - .get(); + ).get(); assertThat(response.getFailedShards(), equalTo(0)); @@ -610,17 +579,15 @@ public void testStringTermsNestedIntoPerBucketAggregator() throws Exception { public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { boolean asc = true; try { - client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("inner_terms>avg", asc)) - .subAggregation( - new TermsAggregationBuilder("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i")) - ) - ) - .get(); + prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + 
.field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("inner_terms>avg", asc)) + .subAggregation( + new TermsAggregationBuilder("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i")) + ) + ).get(); fail("Expected an exception"); } catch (SearchPhaseExecutionException e) { ElasticsearchException[] rootCauses = e.guessRootCauses(); @@ -645,17 +612,15 @@ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception { public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter", asc)) - .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter", asc)) + .subAggregation(filter("filter", QueryBuilders.matchAllQuery())) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -683,21 +648,19 @@ public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception { boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>filter2>stats.max", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(stats("stats").field("i")) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>filter2>stats.max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(stats("stats").field("i")) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -746,21 +709,19 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc)) - .subAggregation( - filter("filter1", 
QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -809,21 +770,19 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS statsNameBuilder.append(randomAlphaOfLengthBetween(3, 10).replace("[", "").replace("]", "").replace(">", "")); String statsName = statsNameBuilder.toString(); boolean asc = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) - .field("tag") - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc)) - .subAggregation( - filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( - filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("tags").executionHint(randomExecutionHint()) + .field("tag") + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc)) + .subAggregation( + filter("filter1", QueryBuilders.matchAllQuery()).subAggregation( + filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(stats(statsName).field("i")) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms tags = response.getAggregations().get("tags"); assertThat(tags, notNullValue()); @@ -867,14 +826,12 @@ public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevelsS public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("avg_i", true)) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("avg_i", true)) + ).get(); fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist"); @@ -887,17 +844,15 @@ public void testSingleValuedFieldOrderedByMissingSubAggregation() throws Excepti public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new 
TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("values", true)) - .subAggregation( - new TermsAggregationBuilder("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("values", true)) + .subAggregation( + new TermsAggregationBuilder("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by sug-aggregation " @@ -913,15 +868,13 @@ public void testSingleValuedFieldOrderedByNonMetricsOrMultiBucketSubAggregation( public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - SearchResponse response = client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.foo", true)) - .subAggregation(stats("stats").field("i")) - ) - .get(); + SearchResponse response = prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.foo", true)) + .subAggregation(stats("stats").field("i")) + ).get(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " + "with an unknown specified metric to order by. 
response had " @@ -938,16 +891,13 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithUnknownMe public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric() throws Exception { for (String index : Arrays.asList("idx", "idx_unmapped")) { try { - client().prepareSearch(index) - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats", true)) - .subAggregation(stats("stats").field("i")) - ) - .execute() - .actionGet(); + prepareSearch(index).addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats", true)) + .subAggregation(stats("stats").field("i")) + ).execute().actionGet(); fail( "Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " @@ -962,17 +912,15 @@ public void testSingleValuedFieldOrderedByMultiValuedSubAggregationWithoutMetric public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field("i")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field("i")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -994,17 +942,15 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws E public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception { boolean asc = false; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.avg", asc)) - .subAggregation(stats("stats").field("i")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.avg", asc)) + .subAggregation(stats("stats").field("i")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1027,17 +973,15 @@ public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new 
TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) - .subAggregation(extendedStats("stats").field("i")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) + .subAggregation(extendedStats("stats").field("i")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1060,20 +1004,18 @@ public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Ex public void testSingleValuedFieldOrderedByStatsAggAscWithTermsSubAgg() throws Exception { boolean asc = true; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) - .subAggregation(extendedStats("stats").field("i")) - .subAggregation( - new TermsAggregationBuilder("subTerms").field("s_values").collectMode(randomFrom(SubAggCollectionMode.values())) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("stats.sum_of_squares", asc)) + .subAggregation(extendedStats("stats").field("i")) + .subAggregation( + new TermsAggregationBuilder("subTerms").field("s_values").collectMode(randomFrom(SubAggCollectionMode.values())) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1146,18 +1088,16 @@ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound } private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... order) { - SearchResponse response = client().prepareSearch("sort_idx") - .addAggregation( - new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) - .field(SINGLE_VALUED_FIELD_NAME) - .collectMode(randomFrom(SubAggCollectionMode.values())) - .order(BucketOrder.compound(order)) - .subAggregation(avg("avg_l").field("l")) - .subAggregation(sum("sum_d").field("d")) - ) - .get(); + SearchResponse response = prepareSearch("sort_idx").addAggregation( + new TermsAggregationBuilder("terms").executionHint(randomExecutionHint()) + .field(SINGLE_VALUED_FIELD_NAME) + .collectMode(randomFrom(SubAggCollectionMode.values())) + .order(BucketOrder.compound(order)) + .subAggregation(avg("avg_l").field("l")) + .subAggregation(sum("sum_d").field("d")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1180,15 +1120,13 @@ private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... 
order } public void testIndexMetaField() throws Exception { - SearchResponse response = client().prepareSearch("idx", "empty_bucket_idx") - .addAggregation( - new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) - .executionHint(randomExecutionHint()) - .field(IndexFieldMapper.NAME) - ) - .get(); + SearchResponse response = prepareSearch("idx", "empty_bucket_idx").addAggregation( + new TermsAggregationBuilder("terms").collectMode(randomFrom(SubAggCollectionMode.values())) + .executionHint(randomExecutionHint()) + .field(IndexFieldMapper.NAME) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); StringTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); assertThat(terms.getName(), equalTo("terms")); @@ -1215,7 +1153,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=keyword") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -1234,14 +1171,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1253,14 +1189,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( new TermsAggregationBuilder("terms").field("d") .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "'foo_' + _value", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1272,8 +1207,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(new TermsAggregationBuilder("terms").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1295,9 +1230,9 @@ public void testScriptWithValueType() throws Exception { String source = builder.toString(); try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - SearchResponse response = client().prepareSearch("idx").setSource(new SearchSourceBuilder().parseXContent(parser, true)).get(); + SearchResponse response = prepareSearch("idx").setSource(new SearchSourceBuilder().parseXContent(parser, true)).get(); - assertSearchResponse(response); + assertNoFailures(response); LongTerms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); 
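// ---------------------------------------------------------------------------
// Editor's aside: the testScriptCaching hunks above keep the original
// verification strategy, which is easy to miss in the noise: run a size-0
// search against an index created with requests.cache.enable=true and a
// single shard, then read the node-level request-cache statistics. A request
// whose aggregation runs a nondeterministic script (Math.random()) must
// bypass the shard request cache, while a deterministic value script is
// expected to be cached on a later pass. A sketch of the first check, under
// those assumptions (the index name, the field "d" and CustomScriptPlugin are
// taken from the surrounding test class):
SearchResponse r = prepareSearch("cache_test_idx").setSize(0)
    .addAggregation(
        new TermsAggregationBuilder("terms").field("d")
            .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "Math.random()", Collections.emptyMap()))
    )
    .get();
assertNoFailures(r);
// The nondeterministic request must not have touched the cache at all, so the
// hit counter stays at zero (the miss counter is asserted the same way).
assertThat(
    indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
    equalTo(0L)
);
// ---------------------------------------------------------------------------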
assertThat(terms.getName(), equalTo("terms")); @@ -1309,7 +1244,7 @@ public void testScriptWithValueType() throws Exception { try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidValueType)) { XContentParseException ex = expectThrows( XContentParseException.class, - () -> client().prepareSearch("idx").setSource(new SearchSourceBuilder().parseXContent(parser, true)).get() + () -> prepareSearch("idx").setSource(new SearchSourceBuilder().parseXContent(parser, true)).get() ); assertThat(ex.getCause(), instanceOf(IllegalArgumentException.class)); assertThat(ex.getCause().getMessage(), containsString("Unknown value type [foobar]")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index ce35f65c8a948..44361587dd09e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -49,14 +49,12 @@ public void testRequestBreaker() throws Exception { ); try { - client().prepareSearch("test") - .addAggregation( - terms("terms").field("field0.keyword") - .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) - .order(BucketOrder.aggregation("cardinality", randomBoolean())) - .subAggregation(cardinality("cardinality").precisionThreshold(randomLongBetween(1, 40000)).field("field1.keyword")) - ) - .get(); + prepareSearch("test").addAggregation( + terms("terms").field("field0.keyword") + .collectMode(randomFrom(Aggregator.SubAggCollectionMode.values())) + .order(BucketOrder.aggregation("cardinality", randomBoolean())) + .subAggregation(cardinality("cardinality").precisionThreshold(randomLongBetween(1, 40000)).field("field1.keyword")) + ).get(); } catch (ElasticsearchException e) { if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) { throw e; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 51d0d5411d6f7..64a97bf0f6f16 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -38,7 +38,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -91,8 +91,7 @@ private static double varianceSampling(int... 
vals) { @Override public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ) @@ -126,8 +125,7 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value")) .get(); @@ -158,13 +156,11 @@ public void testUnmapped() throws Exception { public void testPartiallyUnmapped() { double sigma = randomDouble() * 5; - ExtendedStats s1 = client().prepareSearch("idx") - .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + ExtendedStats s1 = prepareSearch("idx").addAggregation(extendedStats("stats").field("value").sigma(sigma)) .get() .getAggregations() .get("stats"); - ExtendedStats s2 = client().prepareSearch("idx", "idx_unmapped") - .addAggregation(extendedStats("stats").field("value").sigma(sigma)) + ExtendedStats s2 = prepareSearch("idx", "idx_unmapped").addAggregation(extendedStats("stats").field("value").sigma(sigma)) .get() .getAggregations() .get("stats"); @@ -187,8 +183,7 @@ public void testPartiallyUnmapped() { @Override public void testSingleValuedField() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value").sigma(sigma)) .get(); @@ -215,8 +210,7 @@ public void testSingleValuedField() throws Exception { public void testSingleValuedFieldDefaultSigma() throws Exception { // Same as previous test, but uses a default value for sigma - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value")) .get(); @@ -242,8 +236,7 @@ public void testSingleValuedFieldDefaultSigma() throws Exception { public void testSingleValuedField_WithFormatter() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").format("0000.0").field("value").sigma(sigma)) .get(); @@ -280,8 +273,7 @@ public void testSingleValuedField_WithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(extendedStats("stats").field("value"))) .get(); @@ -350,8 +342,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = 
client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("value").sigma(sigma)) .get(); @@ -378,8 +369,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { @Override public void testSingleValuedFieldWithValueScript() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) @@ -412,8 +402,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("inc", 1); double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) @@ -444,8 +433,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testMultiValuedField() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").field("values").sigma(sigma)) .get(); @@ -487,8 +475,7 @@ public void testMultiValuedField() throws Exception { @Override public void testMultiValuedFieldWithValueScript() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", Collections.emptyMap())) @@ -534,8 +521,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) @@ -578,8 +564,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testScriptSingleValued() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").script( new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap()) @@ -615,8 +600,7 @@ public void testScriptSingleValuedWithParams() throws Exception { 
Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params); double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script(script).sigma(sigma)) .get(); @@ -643,8 +627,7 @@ public void testScriptSingleValuedWithParams() throws Exception { @Override public void testScriptMultiValued() throws Exception { double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( extendedStats("stats").script( new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap()) @@ -700,8 +683,7 @@ public void testScriptMultiValuedWithParams() throws Exception { ); double sigma = randomDouble() * randomIntBetween(1, 10); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(extendedStats("stats").script(script).sigma(sigma)) .get(); @@ -732,8 +714,7 @@ public void testScriptMultiValuedWithParams() throws Exception { } public void testEmptySubAggregation() { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("value").field("value") .subAggregation(missing("values").field("values").subAggregation(extendedStats("stats").field("value"))) @@ -776,8 +757,7 @@ public void testEmptySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>extendedStats.avg", true))) @@ -847,7 +827,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -866,14 +845,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( extendedStats("foo").field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -885,14 +863,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( extendedStats("foo").field("d") .script(new Script(ScriptType.INLINE, 
AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -904,8 +881,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(extendedStats("foo").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(extendedStats("foo").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java index 12bb521358642..3aebbce43e1e1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsIT.java @@ -14,7 +14,7 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -26,11 +26,11 @@ public class GeoBoundsIT extends SpatialBoundsAggregationTestBase { public void testSingleValuedFieldNearDateLine() { - SearchResponse response = client().prepareSearch(DATELINE_IDX_NAME) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(false)) - .get(); + SearchResponse response = prepareSearch(DATELINE_IDX_NAME).addAggregation( + boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(false) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoPoint geoValuesTopLeft = new GeoPoint(38, -179); GeoPoint geoValuesBottomRight = new GeoPoint(-24, 178); @@ -50,11 +50,11 @@ public void testSingleValuedFieldNearDateLineWrapLongitude() { GeoPoint geoValuesTopLeft = new GeoPoint(38, 170); GeoPoint geoValuesBottomRight = new GeoPoint(-24, -175); - SearchResponse response = client().prepareSearch(DATELINE_IDX_NAME) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(true)) - .get(); + SearchResponse response = prepareSearch(DATELINE_IDX_NAME).addAggregation( + boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME).wrapLongitude(true) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); GeoBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java index d1caf6d5cb80a..4b12cddde691f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidIT.java @@ -18,7 +18,7 @@ import java.util.List; import static 
org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -29,13 +29,10 @@ public class GeoCentroidIT extends CentroidAggregationTestBase { public void testSingleValueFieldAsSubAggToGeohashGrid() { - SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) - .addAggregation( - geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME) - .subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch(HIGH_CARD_IDX_NAME).addAggregation( + geohashGrid("geoGrid").field(SINGLE_VALUED_FIELD_NAME).subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) + ).get(); + assertNoFailures(response); GeoGrid grid = response.getAggregations().get("geoGrid"); assertThat(grid, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 0bdd75f73e743..7d5e446d591bb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -39,7 +39,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -99,8 +99,7 @@ private void assertConsistent(double[] pcts, PercentileRanks values, long minVal @Override public void testEmptyAggregation() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -129,8 +128,7 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 }).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -153,8 +151,7 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = 
prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -173,8 +170,7 @@ public void testNullValuesField() throws Exception { final double[] pcts = null; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .setQuery(matchAllQuery()) + () -> prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -190,8 +186,7 @@ public void testEmptyValuesField() throws Exception { final double[] pcts = new double[0]; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .setQuery(matchAllQuery()) + () -> prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -206,8 +201,7 @@ public void testEmptyValuesField() throws Exception { public void testSingleValuedFieldGetProperty() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( global("global").subAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) @@ -236,8 +230,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { public void testSingleValuedFieldOutsideRange() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = new double[] { minValue - 1, maxValue + 1 }; - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -255,8 +248,7 @@ public void testSingleValuedFieldOutsideRange() throws Exception { public void testSingleValuedFieldPartiallyUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -274,8 +266,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -296,8 +287,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - 
SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -316,8 +306,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValues, maxValues); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -335,8 +324,7 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -354,8 +342,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(20 - maxValues, 20 - minValues); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -376,8 +363,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map<String, Object> params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -396,8 +382,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { int sigDigits = randomSignificantDigits(); final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -420,8 +405,7 @@ public void testScriptSingleValuedWithParams() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -442,8 +426,7 @@ public void testScriptMultiValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -463,8 +446,7 @@ public void testScriptMultiValuedWithParams() throws Exception { Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.HDR) .numberOfSignificantValueDigits(sigDigits) @@ -481,8 +463,7 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { int sigDigits = randomSignificantDigits(); boolean asc = randomBoolean(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(2L) @@ -513,8 +494,7 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) @@ -557,7 +537,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -576,15 +555,14 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR) .field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -596,15 +574,14 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR) .field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( 
indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -616,11 +593,10 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(percentileRanks("foo", new double[] { 50.0 }).method(PercentilesMethod.HDR).field("d")) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 58a22a32ae13b..3ac50c7b5e104 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -41,7 +41,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -102,8 +102,7 @@ private void assertConsistent(double[] pcts, Percentiles percentiles, long minVa @Override public void testEmptyAggregation() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -133,8 +132,7 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -158,8 +156,7 @@ public void testUnmapped() throws Exception { public void testSingleValuedField() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomIntBetween(1, 5); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -178,8 +175,7 @@ public void testSingleValuedField() throws Exception { public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = 
prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( global("global").subAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) @@ -210,8 +206,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -230,8 +225,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -254,8 +248,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -275,8 +268,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { public void testMultiValuedField() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -295,8 +287,7 @@ public void testMultiValuedField() throws Exception { public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -315,8 +306,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -339,8 +329,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse 
searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -360,8 +349,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -385,8 +373,7 @@ public void testScriptSingleValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -408,8 +395,7 @@ public void testScriptMultiValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -430,8 +416,7 @@ public void testScriptMultiValuedWithParams() throws Exception { final double[] pcts = randomPercentiles(); int sigDigits = randomSignificantDigits(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( percentiles("percentiles").numberOfSignificantValueDigits(sigDigits) .method(PercentilesMethod.HDR) @@ -449,8 +434,7 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { int sigDigits = randomSignificantDigits(); boolean asc = randomBoolean(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(2L) @@ -482,8 +466,7 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) @@ -526,7 +509,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -545,8 +527,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r 
= client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentiles("foo").method(PercentilesMethod.HDR) .field("d") @@ -554,7 +535,7 @@ public void testScriptCaching() throws Exception { .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -566,8 +547,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentiles("foo").method(PercentilesMethod.HDR) .field("d") @@ -575,7 +555,7 @@ public void testScriptCaching() throws Exception { .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -587,11 +567,10 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(percentiles("foo").method(PercentilesMethod.HDR).field("d").percentiles(50.0)) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index b9eacda5d3750..dae90424495a3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -45,7 +45,7 @@ import static org.elasticsearch.search.aggregations.metrics.MedianAbsoluteDeviationAggregatorTests.IsCloseToRelative.closeToRelative; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -138,9 +138,9 @@ private static MedianAbsoluteDeviationAggregationBuilder randomBuilder() { @Override public void testEmptyAggregation() throws Exception { - final SearchResponse response = client().prepareSearch("empty_bucket_idx") - .addAggregation(histogram("histogram").field("value").interval(1).minDocCount(0).subAggregation(randomBuilder().field("value"))) - .get(); + final SearchResponse response = prepareSearch("empty_bucket_idx").addAggregation( + histogram("histogram").field("value").interval(1).minDocCount(0).subAggregation(randomBuilder().field("value")) + ).get(); 
assertHitCount(response, 2); @@ -162,10 +162,7 @@ public void testUnmapped() throws Exception { @Override public void testSingleValuedField() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(randomBuilder().field("value")) - .get(); + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(randomBuilder().field("value")).get(); assertHitCount(response, NUMBER_OF_DOCS); @@ -177,8 +174,7 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(randomBuilder().field("value"))) .get(); @@ -199,8 +195,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { - final SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomBuilder().field("value")) .get(); @@ -214,8 +209,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { @Override public void testSingleValuedFieldWithValueScript() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) @@ -237,8 +231,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { final Map<String, Object> params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) @@ -257,8 +250,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testMultiValuedField() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomBuilder().field("values")) .get(); @@ -272,8 +264,7 @@ public void testMultiValuedField() throws Exception { @Override public void testMultiValuedFieldWithValueScript() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) @@ -294,8 +285,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { final Map<String, Object> params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(
randomBuilder().field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + inc", params)) @@ -313,8 +303,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testScriptSingleValued() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().script( new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", Collections.emptyMap()) @@ -335,8 +324,7 @@ public void testScriptSingleValuedWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value + inc", params)) ) @@ -354,8 +342,7 @@ public void testScriptSingleValuedWithParams() throws Exception { @Override public void testScriptMultiValued() throws Exception { - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().script( new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", Collections.emptyMap()) @@ -376,8 +363,7 @@ public void testScriptMultiValuedWithParams() throws Exception { final Map params = new HashMap<>(); params.put("inc", 1); - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomBuilder().script( new Script( @@ -404,8 +390,7 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testAsSubAggregation() throws Exception { final int rangeBoundary = (MAX_SAMPLE_VALUE + MIN_SAMPLE_VALUE) / 2; - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( range("range").field("value") .addRange(MIN_SAMPLE_VALUE, rangeBoundary) @@ -448,8 +433,7 @@ public void testAsSubAggregation() throws Exception { @Override public void testOrderByEmptyAggregation() throws Exception { final int numberOfBuckets = 10; - final SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + final SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .size(numberOfBuckets) @@ -490,7 +474,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( @@ -510,14 +493,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( randomBuilder().field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) ) 
.get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -529,14 +511,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( randomBuilder().field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -548,8 +529,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(randomBuilder().field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(randomBuilder().field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index a144acf809b53..2ea09960071f9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -49,7 +49,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.scriptedMetric; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -359,11 +359,10 @@ public void testMap() { Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -402,8 +401,7 @@ public void testMapWithParams() { Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("idx") - 
.setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(aggregationParams) .mapScript(mapScript) @@ -411,7 +409,7 @@ public void testMapWithParams() { .reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -451,8 +449,7 @@ public void testInitMutatesParams() { Map<String, Object> params = new HashMap<>(); params.put("vars", varsMap); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap())) @@ -463,7 +460,7 @@ public void testInitMutatesParams() { .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap())) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -511,13 +508,12 @@ public void testMapCombineWithParams() { ); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -570,8 +566,7 @@ public void testInitMapCombineWithParams() { ); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(initScript) @@ -580,7 +575,7 @@ public void testInitMapCombineWithParams() { .mapScript(mapScript) .combineScript(combineScript) .reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -638,8 +633,7 @@ public void testInitMapCombineReduceWithParams() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(initScript) @@ -648,7 +642,7 @@ public void testInitMapCombineReduceWithParams() { .mapScript(mapScript) .combineScript(combineScript) .reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -694,8 +688,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception {
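// Reviewer note for the ScriptedMetricIT hunks: a scripted_metric aggregation is
// assembled from four script phases. init and map run on each shard (map once per
// matching document), combine folds the shard-local state before it leaves the shard,
// and reduce merges the per-shard results on the coordinating node -- which is why the
// NOTE in the cache test further down says a random reduce script can still be cached.
// Minimal sketch only: the script strings are the mock keys these tests register in
// CustomScriptPlugin, and the map-script key is an assumption not shown in these hunks.
Map<String, Object> params = new HashMap<>();
params.put("vars", new HashMap<String, Object>()); // mutated by the init script below
SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery())
    .addAggregation(
        scriptedMetric("scripted").params(params)
            .initScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
            .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)", Collections.emptyMap()))
            .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()))
            .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()))
    )
    .get();
assertNoFailures(response);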
Collections.emptyMap() ); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( global("global").subAggregation( scriptedMetric("scripted").params(params) @@ -707,7 +700,7 @@ public void testInitMapCombineReduceGetProperty() throws Exception { ) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numDocs)); Global global = searchResponse.getAggregations().get("global"); @@ -759,13 +752,12 @@ public void testMapCombineReduceWithParams() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -805,8 +797,7 @@ public void testInitMapReduceWithParams() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(initScript) @@ -815,7 +806,7 @@ public void testInitMapReduceWithParams() { .reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -853,13 +844,12 @@ public void testMapReduceWithParams() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -907,8 +897,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { reduceParams ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(initScript) @@ -917,7 +906,7 @@ public void testInitMapCombineReduceWithParamsAndReduceParams() { .reduceScript(reduceScript) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -942,8 +931,7 @@ public void testInitMapCombineReduceWithParamsStored() { Map<String, Object> params = new HashMap<>(); params.put("vars", varsMap); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params) .initScript(new Script(ScriptType.STORED, null,
"initScript_stored", Collections.emptyMap())) @@ -952,7 +940,7 @@ public void testInitMapCombineReduceWithParamsStored() { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); @@ -997,8 +985,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { Collections.emptyMap() ); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch("idx").setQuery(matchAllQuery()) .setSize(1000) .addAggregation( histogram("histo").field("l_value") @@ -1012,7 +999,7 @@ public void testInitMapCombineReduceWithParamsAsSubAgg() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); @@ -1070,8 +1057,7 @@ public void testEmptyAggregation() throws Exception { Collections.emptyMap() ); - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -1125,7 +1111,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -1144,13 +1129,12 @@ public void testScriptCaching() throws Exception { ); // Test that a non-deterministic init script causes the result to not be cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( scriptedMetric("foo").initScript(ndInitScript).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1162,11 +1146,10 @@ public void testScriptCaching() throws Exception { ); // Test that a non-deterministic map script causes the result to not be cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(scriptedMetric("foo").mapScript(ndMapScript).combineScript(combineScript).reduceScript(reduceScript)) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1178,11 +1161,10 @@ public void testScriptCaching() throws Exception { ); // Test that a non-deterministic combine script causes the result to not be cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(ndRandom).reduceScript(reduceScript)) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ 
-1194,11 +1176,10 @@ public void testScriptCaching() throws Exception { ); // NOTE: random reduce scripts don't hit the query shard context (they are done on the coordinator) and so can be cached. - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(ndRandom)) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1210,11 +1191,10 @@ public void testScriptCaching() throws Exception { ); // Test that all deterministic scripts cause the request to be cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1232,8 +1212,7 @@ public void testConflictingAggAndScriptParams() { Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap()); Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op list aggregation", Collections.emptyMap()); - SearchRequestBuilder builder = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchRequestBuilder builder = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index 06149028e3fb5..eb4d5aa74f2a0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -34,7 +34,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -48,8 +48,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { @Override public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(stats("stats").field("value"))) .get(); @@ -73,10 +72,7 @@ public void testEmptyAggregation() throws Exception { @Override public void testSingleValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) -
.addAggregation(stats("stats").field("value")) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(stats("stats").field("value")).get(); assertShardExecutionState(searchResponse, 0); @@ -94,8 +90,7 @@ public void testSingleValuedField() throws Exception { public void testSingleValuedField_WithFormatter() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(stats("stats").format("0000.0").field("value")) .get(); @@ -117,8 +112,7 @@ public void testSingleValuedField_WithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(stats("stats").field("value"))) .get(); @@ -156,10 +150,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testMultiValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(stats("stats").field("values")) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(stats("stats").field("values")).get(); assertShardExecutionState(searchResponse, 0); @@ -180,8 +171,7 @@ public void testMultiValuedField() throws Exception { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>stats.avg", true))) @@ -235,7 +225,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -254,14 +243,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( stats("foo").field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -273,14 +261,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( stats("foo").field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value + 1", Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -292,8 +279,8 @@ public void 
testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(stats("foo").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(stats("foo").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 75e2870c58ff5..a837b22694ef5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -38,7 +38,7 @@ import static org.elasticsearch.search.aggregations.metrics.MetricAggScriptPlugin.VALUE_SCRIPT; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -77,8 +77,7 @@ public void setupSuiteScopeCluster() throws Exception { @Override public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))) .get(); @@ -100,10 +99,7 @@ public void testUnmapped() throws Exception {} @Override public void testSingleValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("value")) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(sum("sum").field("value")).get(); assertHitCount(searchResponse, 10); @@ -114,8 +110,7 @@ public void testSingleValuedField() throws Exception { } public void testSingleValuedFieldWithFormatter() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(sum("sum").format("0000.0").field("value")) .get(); @@ -131,8 +126,7 @@ public void testSingleValuedFieldWithFormatter() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(sum("sum").field("value"))) .get(); @@ -158,10 +152,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testMultiValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(sum("sum").field("values")) - .get(); + SearchResponse searchResponse = 
prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(sum("sum").field("values")).get(); assertHitCount(searchResponse, 10); @@ -173,8 +164,7 @@ public void testMultiValuedField() throws Exception { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>sum", true))) @@ -213,7 +203,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -232,13 +221,12 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( sum("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -250,13 +238,12 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( sum("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_SCRIPT, Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -268,8 +255,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(sum("foo").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(sum("foo").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -282,11 +269,9 @@ public void testScriptCaching() throws Exception { } public void testFieldAlias() { - SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(sum("sum").field("route_length_miles")) - .get(); + SearchResponse response = prepareSearch("old_index", "new_index").addAggregation(sum("sum").field("route_length_miles")).get(); - assertSearchResponse(response); + assertNoFailures(response); Sum sum = response.getAggregations().get("sum"); assertThat(sum, IsNull.notNullValue()); @@ -295,11 +280,11 @@ public void testFieldAlias() { } public void testFieldAliasInSubAggregation() { - SearchResponse response = client().prepareSearch("old_index", "new_index") - .addAggregation(terms("terms").field("transit_mode").subAggregation(sum("sum").field("route_length_miles"))) - .get(); + SearchResponse response = prepareSearch("old_index", "new_index").addAggregation( + 
terms("terms").field("transit_mode").subAggregation(sum("sum").field("route_length_miles")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 6909bd719f6dd..421d6f118c277 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -39,7 +39,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -96,8 +96,7 @@ private void assertConsistent(double[] pcts, PercentileRanks values, long minVal @Override public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -123,8 +122,7 @@ public void testNullValuesField() throws Exception { final double[] pcts = null; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .setQuery(matchAllQuery()) + () -> prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) .get() ); @@ -135,8 +133,7 @@ public void testEmptyValuesField() throws Exception { final double[] pcts = new double[0]; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("idx") - .setQuery(matchAllQuery()) + () -> prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(percentileRanks("percentile_ranks", pcts).method(PercentilesMethod.TDIGEST).field("value")) .get() ); @@ -145,8 +142,7 @@ public void testEmptyValuesField() throws Exception { @Override public void testUnmapped() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")) .get(); @@ -164,8 +160,7 @@ public void testUnmapped() throws Exception { @Override public void testSingleValuedField() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", 
pcts)).field("value")) .get(); @@ -178,8 +173,7 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value"))) .get(); @@ -200,8 +194,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { public void testSingleValuedFieldOutsideRange() throws Exception { final double[] pcts = new double[] { minValue - 1, maxValue + 1 }; - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value")) .get(); @@ -214,8 +207,7 @@ public void testSingleValuedFieldOutsideRange() throws Exception { @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("value")) .get(); @@ -228,8 +220,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { @Override public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) @@ -247,8 +238,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) @@ -264,8 +254,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testMultiValuedField() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).field("values")) .get(); @@ -278,8 +267,7 @@ public void testMultiValuedField() throws Exception { @Override public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - 
.setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) @@ -294,8 +282,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercents(-maxValues, -minValues); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) @@ -313,8 +300,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map<String, Object> params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) @@ -330,8 +316,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testScriptSingleValued() throws Exception { final double[] pcts = randomPercents(minValue, maxValue); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentileRanks("percentile_ranks", pcts)).script( new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()) @@ -353,8 +338,7 @@ public void testScriptSingleValuedWithParams() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value - dec", params); final double[] pcts = randomPercents(minValue - 1, maxValue - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) .get(); @@ -368,8 +352,7 @@ public void testScriptSingleValuedWithParams() throws Exception { public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercents(minValues, maxValues); Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) .get(); @@ -384,8 +367,7 @@ public void testScriptMultiValuedWithParams() throws Exception { Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; final double[] pcts = randomPercents(minValues - 1, maxValues - 1); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse =
prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", pcts)).script(script)) .get(); @@ -397,8 +379,7 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { boolean asc = randomBoolean(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(2L) @@ -425,8 +406,7 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true))) @@ -469,7 +449,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -488,14 +467,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentileRanks("foo", new double[] { 50.0 }).field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -507,14 +485,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentileRanks("foo", new double[] { 50.0 }).field("d") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -526,11 +503,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx") - .setSize(0) - .addAggregation(percentileRanks("foo", new double[] { 50.0 }).field("d")) - .get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(percentileRanks("foo", new double[] { 50.0 }).field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index 3fc5bf9863256..58b2b13853848 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -41,7 +41,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -102,8 +102,7 @@ private void assertConsistent(double[] pcts, Percentiles percentiles, long minVa @Override public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("empty_bucket_idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(1L) @@ -127,8 +126,7 @@ public void testEmptyAggregation() throws Exception { @Override public void testUnmapped() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)) .get(); @@ -146,8 +144,7 @@ public void testUnmapped() throws Exception { @Override public void testSingleValuedField() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)) .get(); @@ -160,8 +157,7 @@ public void testSingleValuedField() throws Exception { @Override public void testSingleValuedFieldGetProperty() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts))) .get(); @@ -183,8 +179,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { @Override public void testSingleValuedFieldPartiallyUnmapped() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(pcts)) .get(); @@ -197,8 +192,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { @Override public void testSingleValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentiles("percentiles")).field("value") .script(new 
Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) @@ -217,8 +211,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { Map<String, Object> params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentiles("percentiles")).field("value") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) @@ -235,8 +228,7 @@ public void testSingleValuedFieldWithValueScriptWithParams() throws Exception { @Override public void testMultiValuedField() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("values").percentiles(pcts)) .get(); @@ -249,8 +241,7 @@ public void testMultiValuedField() throws Exception { @Override public void testMultiValuedFieldWithValueScript() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentiles("percentiles")).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) @@ -266,8 +257,7 @@ public void testMultiValuedFieldWithValueScript() throws Exception { public void testMultiValuedFieldWithValueScriptReverse() throws Exception { final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentiles("percentiles")).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value * -1", emptyMap())) @@ -286,8 +276,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { Map<String, Object> params = new HashMap<>(); params.put("dec", 1); final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( randomCompression(percentiles("percentiles")).field("values") .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - dec", params)) @@ -305,8 +294,7 @@ public void testMultiValuedFieldWithValueScriptWithParams() throws Exception { public void testScriptSingleValued() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value", emptyMap()); final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) .get(); @@ -324,8 +312,7 @@ public void testScriptSingleValuedWithParams() throws Exception { Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['value'].value
- dec", params); final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) .get(); @@ -340,8 +327,7 @@ public void testScriptMultiValued() throws Exception { final double[] pcts = randomPercentiles(); Script script = new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "doc['values']", emptyMap()); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) .get(); @@ -356,8 +342,7 @@ public void testScriptMultiValuedWithParams() throws Exception { Script script = AggregationTestScriptsPlugin.DECREMENT_ALL_VALUES; final double[] pcts = randomPercentiles(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).script(script).percentiles(pcts)) .get(); @@ -369,8 +354,7 @@ public void testScriptMultiValuedWithParams() throws Exception { public void testOrderBySubAggregation() { boolean asc = randomBoolean(); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( histogram("histo").field("value") .interval(2L) @@ -397,8 +381,7 @@ public void testOrderBySubAggregation() { @Override public void testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true))) @@ -441,7 +424,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -460,15 +442,14 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentiles("foo").field("d") .percentiles(50.0) .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "Math.random()", emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -480,15 +461,14 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( percentiles("foo").field("d") .percentiles(50.0) .script(new Script(ScriptType.INLINE, AggregationTestScriptsPlugin.NAME, "_value - 1", emptyMap())) ) .get(); - 
assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -500,8 +480,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d").percentiles(50.0)).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(percentiles("foo").field("d").percentiles(50.0)).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 5829f75d45edb..ab9ab37894f70 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -65,7 +65,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xcontent.XContentFactory.smileBuilder; import static org.elasticsearch.xcontent.XContentFactory.yamlBuilder; @@ -312,15 +311,13 @@ private String key(Terms.Bucket bucket) { } public void testBasics() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -348,13 +345,12 @@ public void testBasics() throws Exception { public void testIssue11119() throws Exception { // Test that top_hits aggregation is fed scores if query results size=0 - SearchResponse response = client().prepareSearch("field-collapsing") - .setSize(0) + SearchResponse response = prepareSearch("field-collapsing").setSize(0) .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); @@ -381,14 +377,13 @@ public void testIssue11119() throws Exception { // (technically not a test of top_hits but implementation details are // tied up with the need to feed scores into the agg tree even when // users don't want ranked set of query results.) 
- response = client().prepareSearch("field-collapsing") - .setSize(0) + response = prepareSearch("field-collapsing").setSize(0) .setMinScore(0.0001f) .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")) .get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); @@ -400,16 +395,14 @@ public void testIssue11119() throws Exception { } public void testBreadthFirstWithScoreNeeded() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .collectMode(SubAggCollectionMode.BREADTH_FIRST) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").size(3)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").size(3)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -431,18 +424,16 @@ public void testBreadthFirstWithScoreNeeded() throws Exception { } public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .collectMode(SubAggCollectionMode.BREADTH_FIRST) - .field(TERMS_AGGS_FIELD) - .order(BucketOrder.aggregation("max", false)) - .subAggregation(max("max").field(SORT_FIELD)) - .subAggregation(topHits("hits").size(3)) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .collectMode(SubAggCollectionMode.BREADTH_FIRST) + .field(TERMS_AGGS_FIELD) + .order(BucketOrder.aggregation("max", false)) + .subAggregation(max("max").field(SORT_FIELD)) + .subAggregation(topHits("hits").size(3)) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -464,12 +455,11 @@ public void testBreadthFirstWithAggOrderAndScoreNeeded() throws Exception { } public void testBasicsGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(topHits("hits"))) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); Global global = searchResponse.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -487,22 +477,19 @@ public void testBasicsGetProperty() throws Exception { public void testPagination() throws Exception { int size = randomIntBetween(1, 10); int from = randomIntBetween(0, 10); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).from(from).size(size)) - ) - .get(); - assertSearchResponse(response); - - SearchResponse control = client().prepareSearch("idx") - .setFrom(from) + SearchResponse response = prepareSearch("idx").addAggregation( + 
terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).from(from).size(size)) + ).get(); + assertNoFailures(response); + + SearchResponse control = prepareSearch("idx").setFrom(from) .setSize(size) .setPostFilter(QueryBuilders.termQuery(TERMS_AGGS_FIELD, "val0")) .addSort(SORT_FIELD, SortOrder.DESC) .get(); - assertSearchResponse(control); + assertNoFailures(control); SearchHits controlHits = control.getHits(); Terms terms = response.getAggregations().get("terms"); @@ -532,16 +519,14 @@ public void testPagination() throws Exception { } public void testSortByBucket() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .order(BucketOrder.aggregation("max_sort", false)) - .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true)) - .subAggregation(max("max_sort").field(SORT_FIELD)) - ) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .order(BucketOrder.aggregation("max_sort", false)) + .subAggregation(topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true)) + .subAggregation(max("max_sort").field(SORT_FIELD)) + ).get(); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -567,8 +552,7 @@ public void testSortByBucket() throws Exception { } public void testFieldCollapsing() throws Exception { - SearchResponse response = client().prepareSearch("field-collapsing") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("field-collapsing").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(matchQuery("text", "term rare")) .addAggregation( terms("terms").executionHint(randomExecutionHint()) @@ -578,7 +562,7 @@ public void testFieldCollapsing() throws Exception { .subAggregation(max("max_score").field("value")) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -613,8 +597,7 @@ public void testFieldCollapsing() throws Exception { public void testFetchFeatures() { final boolean seqNoAndTerm = randomBoolean(); - SearchResponse response = client().prepareSearch("idx") - .setQuery(matchQuery("text", "text").queryName("test")) + SearchResponse response = prepareSearch("idx").setQuery(matchQuery("text", "text").queryName("test")) .addAggregation( terms("terms").executionHint(randomExecutionHint()) .field(TERMS_AGGS_FIELD) @@ -632,7 +615,7 @@ public void testFetchFeatures() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -684,13 +667,11 @@ public void testFetchFeatures() { public void testInvalidSortField() throws Exception { try { - client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC))) - ) - .get(); + prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) 
+ .subAggregation(topHits("hits").sort(SortBuilders.fieldSort("xyz").order(SortOrder.DESC))) + ).get(); fail(); } catch (SearchPhaseExecutionException e) { assertThat(e.toString(), containsString("No mapping found for [xyz] in order to sort on")); @@ -698,8 +679,8 @@ public void testInvalidSortField() throws Exception { } public void testEmptyIndex() throws Exception { - SearchResponse response = client().prepareSearch("empty").addAggregation(topHits("hits")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("empty").addAggregation(topHits("hits")).get(); + assertNoFailures(response); TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); @@ -711,14 +692,13 @@ public void testTrackScores() throws Exception { boolean[] trackScores = new boolean[] { true, false }; for (boolean trackScore : trackScores) { logger.info("Track score={}", trackScore); - SearchResponse response = client().prepareSearch("field-collapsing") - .setQuery(matchQuery("text", "term rare")) + SearchResponse response = prepareSearch("field-collapsing").setQuery(matchQuery("text", "term rare")) .addAggregation( terms("terms").field("group") .subAggregation(topHits("hits").trackScores(trackScore).size(1).sort("_index", SortOrder.DESC)) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -749,8 +729,7 @@ public void testTrackScores() throws Exception { } public void testTopHitsInNestedSimple() throws Exception { - SearchResponse searchResponse = client().prepareSearch("articles") - .setQuery(matchQuery("title", "title")) + SearchResponse searchResponse = prepareSearch("articles").setQuery(matchQuery("title", "title")) .addAggregation( nested("to-comments", "comments").subAggregation( terms("users").field("comments.user").subAggregation(topHits("top-comments").sort("comments.date", SortOrder.ASC)) @@ -794,8 +773,7 @@ public void testTopHitsInNestedSimple() throws Exception { } public void testTopHitsInSecondLayerNested() throws Exception { - SearchResponse searchResponse = client().prepareSearch("articles") - .setQuery(matchQuery("title", "title")) + SearchResponse searchResponse = prepareSearch("articles").setQuery(matchQuery("title", "title")) .addAggregation( nested("to-comments", "comments").subAggregation( nested("to-reviewers", "comments.reviewers").subAggregation( @@ -897,8 +875,9 @@ public void testNestedFetchFeatures() { matchQuery("comments.message", "comment") ).highlighterType(hlType); - SearchResponse searchResponse = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"), ScoreMode.Avg)) + SearchResponse searchResponse = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "comment").queryName("test"), ScoreMode.Avg) + ) .addAggregation( nested("to-comments", "comments").subAggregation( topHits("top-comments").size(1) @@ -950,21 +929,19 @@ public void testNestedFetchFeatures() { } public void testTopHitsInNested() throws Exception { - SearchResponse searchResponse = client().prepareSearch("articles") - .addAggregation( - histogram("dates").field("date") - .interval(5) - .subAggregation( - nested("to-comments", "comments").subAggregation( - topHits("comments").highlighter( - new HighlightBuilder().field( - new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text")) - ) - 
).sort("comments.id", SortOrder.ASC) - ) + SearchResponse searchResponse = prepareSearch("articles").addAggregation( + histogram("dates").field("date") + .interval(5) + .subAggregation( + nested("to-comments", "comments").subAggregation( + topHits("comments").highlighter( + new HighlightBuilder().field( + new HighlightBuilder.Field("comments.message").highlightQuery(matchQuery("comments.message", "text")) + ) + ).sort("comments.id", SortOrder.ASC) ) - ) - .get(); + ) + ).get(); Histogram histogram = searchResponse.getAggregations().get("dates"); for (int i = 0; i < numArticles; i += 5) { @@ -996,38 +973,33 @@ public void testUseMaxDocInsteadOfSize() throws Exception { "idx" ); assertNoFailures( - client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation( - topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1) - .sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) - ) - ) + prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation( + topHits("hits").size(ArrayUtil.MAX_ARRAY_LENGTH - 1).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)) + ) + ) ); updateIndexSettings(Settings.builder().putNull(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey()), "idx"); } public void testTooHighResultWindow() throws Exception { assertNoFailures( - client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").from(50).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) + prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(50).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) ); Exception e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) - .get() + () -> prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ).get() ); assertThat( e.getCause().getMessage(), @@ -1035,13 +1007,11 @@ public void testTooHighResultWindow() throws Exception { ); e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) - .get() + () -> prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ).get() ); assertThat( e.getCause().getMessage(), @@ -1050,34 +1020,30 @@ public void testTooHighResultWindow() throws Exception { updateIndexSettings(Settings.builder().put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110), "idx"); assertNoFailures( - client().prepareSearch("idx") - .addAggregation( - 
terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) + prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(100).size(10).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) ); assertNoFailures( - client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) - ) + prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").from(10).size(100).sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC))) + ) ); updateIndexSettings(Settings.builder().putNull(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey()), "idx"); } public void testNoStoredFields() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms("terms").executionHint(randomExecutionHint()) - .field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").storedField("_none_")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms("terms").executionHint(randomExecutionHint()) + .field(TERMS_AGGS_FIELD) + .subAggregation(topHits("hits").storedField("_none_")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -1111,7 +1077,6 @@ public void testScriptCaching() throws Exception { .setSettings( Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1) ) - .get() ); indexRandom( true, @@ -1130,8 +1095,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script field does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( topHits("foo").scriptField( "bar", @@ -1139,7 +1103,7 @@ public void testScriptCaching() throws Exception { ) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1151,8 +1115,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script sort does not get cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( topHits("foo").sort( SortBuilders.scriptSort( @@ -1162,7 +1125,7 @@ public void testScriptCaching() throws Exception { ) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1174,13 +1137,12 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script field does not get cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( topHits("foo").scriptField("bar", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "5", 
Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1192,8 +1154,7 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script sort does not get cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( topHits("foo").sort( SortBuilders.scriptSort( @@ -1203,7 +1164,7 @@ public void testScriptCaching() throws Exception { ) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1215,8 +1176,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(topHits("foo")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(topHits("foo")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -1234,8 +1195,7 @@ public void testScriptCaching() throws Exception { public void testWithRescore() { // Rescore with default sort on relevancy (score) { - SearchResponse response = client().prepareSearch("idx") - .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + SearchResponse response = prepareSearch("idx").addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation(terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits"))) .get(); Terms terms = response.getAggregations().get("terms"); @@ -1248,8 +1208,7 @@ public void testWithRescore() { } { - SearchResponse response = client().prepareSearch("idx") - .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + SearchResponse response = prepareSearch("idx").addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation(terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.scoreSort()))) .get(); Terms terms = response.getAggregations().get("terms"); @@ -1263,8 +1222,7 @@ public void testWithRescore() { // Rescore should not be applied if the sort order is not relevancy { - SearchResponse response = client().prepareSearch("idx") - .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + SearchResponse response = prepareSearch("idx").addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation( terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_index"))) ) @@ -1279,8 +1237,7 @@ public void testWithRescore() { } { - SearchResponse response = client().prepareSearch("idx") - .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) + SearchResponse response = prepareSearch("idx").addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation( terms("terms").field(TERMS_AGGS_FIELD) .subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index"))) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index dda06b9f422ad..d122ee10d90a5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -37,7 +37,7 @@ import static org.elasticsearch.search.aggregations.metrics.MetricAggScriptPlugin.VALUE_FIELD_SCRIPT; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -67,8 +67,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { } public void testUnmapped() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(count("count").field("value")) .get(); @@ -81,10 +80,7 @@ public void testUnmapped() throws Exception { } public void testSingleValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("value")) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(count("count").field("value")).get(); assertHitCount(searchResponse, 10); @@ -95,8 +91,7 @@ public void testSingleValuedField() throws Exception { } public void testSingleValuedFieldGetProperty() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(global("global").subAggregation(count("count").field("value"))) .get(); @@ -119,8 +114,7 @@ public void testSingleValuedFieldGetProperty() throws Exception { } public void testSingleValuedFieldPartiallyUnmapped() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx", "idx_unmapped") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx", "idx_unmapped").setQuery(matchAllQuery()) .addAggregation(count("count").field("value")) .get(); @@ -133,10 +127,7 @@ public void testSingleValuedFieldPartiallyUnmapped() throws Exception { } public void testMultiValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) - .addAggregation(count("count").field("values")) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()).addAggregation(count("count").field("values")).get(); assertHitCount(searchResponse, 10); @@ -147,8 +138,7 @@ public void testMultiValuedField() throws Exception { } public void testSingleValuedScript() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap())) ) @@ -163,8 +153,7 @@ public
void testSingleValuedScript() throws Exception { } public void testMultiValuedScript() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_VALUES_FIELD_SCRIPT, Collections.emptyMap())) ) @@ -180,8 +169,7 @@ public void testMultiValuedScript() throws Exception { public void testSingleValuedScriptWithParams() throws Exception { Map<String, Object> params = Collections.singletonMap("field", "value"); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_FIELD_PARAMS_SCRIPT, params))) .get(); @@ -195,8 +183,7 @@ public void testSingleValuedScriptWithParams() throws Exception { public void testMultiValuedScriptWithParams() throws Exception { Map<String, Object> params = Collections.singletonMap("field", "values"); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(count("count").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, SUM_FIELD_PARAMS_SCRIPT, params))) .get(); @@ -216,7 +203,6 @@ public void testScriptCaching() throws Exception { assertAcked( prepareCreate("cache_test_idx").setMapping("d", "type=long") .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1)) - .get() ); indexRandom( true, @@ -235,13 +221,12 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a nondeterministic script does not get cached - SearchResponse r = client().prepareSearch("cache_test_idx") - .setSize(0) + SearchResponse r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( count("foo").field("d").script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, RANDOM_SCRIPT, Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -253,14 +238,13 @@ public void testScriptCaching() throws Exception { ); // Test that a request using a deterministic script gets cached - r = client().prepareSearch("cache_test_idx") - .setSize(0) + r = prepareSearch("cache_test_idx").setSize(0) .addAggregation( count("foo").field("d") .script(new Script(ScriptType.INLINE, METRIC_SCRIPT_ENGINE, VALUE_FIELD_SCRIPT, Collections.emptyMap())) ) .get(); - assertSearchResponse(r); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -272,8 +256,8 @@ public void testScriptCaching() throws Exception { ); // Ensure that non-scripted requests are cached as normal - r = client().prepareSearch("cache_test_idx").setSize(0).addAggregation(count("foo").field("d")).get(); - assertSearchResponse(r); + r = prepareSearch("cache_test_idx").setSize(0).addAggregation(count("foo").field("d")).get(); + assertNoFailures(r); assertThat( indicesAdmin().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), @@ -286,8 +270,7 @@ public void testScriptCaching() throws Exception { } public void
testOrderByEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation( terms("terms").field("value") .order(BucketOrder.compound(BucketOrder.aggregation("filter>count", true))) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java index cc4cfabfffa54..195b4a41b617a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketMetricsPipeLineAggregationTestCase.java @@ -37,7 +37,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -127,14 +127,11 @@ private String randomName() { } public void testDocCountTopLevel() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) - ) - .addAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count")) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ).addAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count")).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get(histoName); assertThat(histo, notNullValue()); @@ -157,20 +154,16 @@ public void testDocCountTopLevel() { } public void testDocCountAsSubAgg() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - ) - .subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">_count")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -203,12 +196,11 @@ public void testDocCountAsSubAgg() { } public void testMetricTopLevel() { - SearchResponse response = client().prepareSearch("idx") - 
.addAggregation(terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -236,21 +228,19 @@ public void testMetricTopLevel() { } public void testMetricAsSubAgg() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum")) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum")) + ).get(); + + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -292,23 +282,21 @@ public void testMetricAsSubAgg() { } public void testMetricAsSubAggWithInsertZeros() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .subAggregation( - BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum").gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) - ) - ) - .get(); - - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation( + BucketMetricsPipelineAgg("pipeline_agg", histoName + ">sum").gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) + ) + ).get(); + + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -346,16 +334,13 @@ public void testMetricAsSubAggWithInsertZeros() { } public void testNoBuckets() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .includeExclude(new IncludeExclude(null, "tag.*", null, null)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .includeExclude(new 
IncludeExclude(null, "tag.*", null, null)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).addAggregation(BucketMetricsPipelineAgg("pipeline_agg", termsName + ">sum")).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -371,21 +356,16 @@ public void testNoBuckets() { } public void testNested() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - ) - .subAggregation(BucketMetricsPipelineAgg("nested_histo_bucket", histoName + ">_count")) - ) - .addAggregation(BucketMetricsPipelineAgg("nested_terms_bucket", termsName + ">nested_histo_bucket." + nestedMetric())) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(BucketMetricsPipelineAgg("nested_histo_bucket", histoName + ">_count")) + ).addAggregation(BucketMetricsPipelineAgg("nested_terms_bucket", termsName + ">nested_histo_bucket." + nestedMetric())).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -488,7 +468,7 @@ public void testFieldIsntWrittenOutTwice() throws Exception { groupByLicenseAgg.subAggregation(licensePerDayBuilder); groupByLicenseAgg.subAggregation(BucketMetricsPipelineAgg("peak", "licenses_per_day>total_licenses")); - SearchResponse response = client().prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg).get(); + SearchResponse response = prepareSearch("foo_*").setSize(0).addAggregation(groupByLicenseAgg).get(); BytesReference bytes = XContentHelper.toXContent(response, XContentType.JSON, false); XContentHelper.convertToMap(bytes, false, XContentType.JSON); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index 5c4caf5f242a6..9a9dc44b71ef2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -42,7 +42,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -155,26 +155,24 @@ private XContentBuilder newDocBuilder() throws IOException { } public void testInlineScript() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - 
.interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -205,26 +203,24 @@ public void testInlineScript() { } public void testInlineScript2() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 / _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 / _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -255,26 +251,24 @@ public void testInlineScript2() { } public void testInlineScriptWithDateRange() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - dateRange("range").field(FIELD_5_NAME) - .addUnboundedFrom(date) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + dateRange("range").field(FIELD_5_NAME) + .addUnboundedFrom(date) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", 
+ "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("range"); assertThat(range, notNullValue()); @@ -305,22 +299,20 @@ public void testInlineScriptWithDateRange() { } public void testInlineScriptSingleVariable() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0", Collections.emptyMap()), - "field2Sum" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0", Collections.emptyMap()), + "field2Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -349,24 +341,22 @@ public void testInlineScriptNamedVars() { bucketsPathsMap.put("foo", "field2Sum"); bucketsPathsMap.put("bar", "field3Sum"); bucketsPathsMap.put("baz", "field4Sum"); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - bucketsPathsMap, - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "foo + bar + baz", Collections.emptyMap()) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + bucketsPathsMap, + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "foo + bar + baz", Collections.emptyMap()) ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -402,18 +392,16 @@ public void testInlineScriptWithParams() { Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "(_value0 + _value1 + _value2) * factor", params); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScript("seriesArithmetic", script, "field2Sum", "field3Sum", "field4Sum")) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation(bucketScript("seriesArithmetic", script, "field2Sum", "field3Sum", 
"field4Sum")) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -444,26 +432,24 @@ public void testInlineScriptWithParams() { } public void testInlineScriptInsertZeros() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ).gapPolicy(GapPolicy.INSERT_ZEROS) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" + ).gapPolicy(GapPolicy.INSERT_ZEROS) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -496,20 +482,15 @@ public void testInlineScriptInsertZeros() { } public void testInlineScriptReturnNull() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation( - bucketScript( - "nullField", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap()) - ) - ) - ) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation( + bucketScript("nullField", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", Collections.emptyMap())) + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -533,26 +514,24 @@ public void testStoredScript() { ) ); - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.STORED, null, "my_script", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.STORED, null, "my_script", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + 
assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -583,26 +562,24 @@ public void testStoredScript() { } public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx_unmapped") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx_unmapped").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram deriv = response.getAggregations().get("histo"); assertThat(deriv, notNullValue()); @@ -611,26 +588,24 @@ public void testUnmapped() throws Exception { } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Sum", - "field3Sum", - "field4Sum" - ) + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Sum", + "field3Sum", + "field4Sum" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -674,16 +649,14 @@ public void testSingleBucketPathAgg() throws Exception { "seriesArithmetic" ); - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(bucketScriptAgg) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(bucketScriptAgg) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = 
response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -721,18 +694,16 @@ public void testArrayBucketPathAgg() throws Exception { "seriesArithmetic" ); - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScriptAgg) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation(bucketScriptAgg) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -780,18 +751,16 @@ public void testObjectBucketPathAgg() throws Exception { "seriesArithmetic" ); - SearchResponse response = client().prepareSearch("idx", "idx_unmapped") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) - .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) - .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) - .subAggregation(bucketScriptAgg) - ) - .get(); + SearchResponse response = prepareSearch("idx", "idx_unmapped").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(sum("field2Sum").field(FIELD_2_NAME)) + .subAggregation(sum("field3Sum").field(FIELD_3_NAME)) + .subAggregation(sum("field4Sum").field(FIELD_4_NAME)) + .subAggregation(bucketScriptAgg) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -823,29 +792,22 @@ public void testObjectBucketPathAgg() throws Exception { public void testInlineScriptWithMultiValueAggregationIllegalBucketsPaths() { try { - client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME).percentiles(10, 50, 90)) - .subAggregation(percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(10, 50, 90)) - .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(10, 50, 90)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script( - ScriptType.INLINE, - CustomScriptPlugin.NAME, - "_value0 + _value1 + _value2", - Collections.emptyMap() - ), - "field2Percentile", - "field3Percentile", - "field4Percentile" - ) + prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME).percentiles(10, 50, 90)) + .subAggregation(percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(10, 50, 90)) + .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(10, 50, 90)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Percentile", + "field3Percentile", + "field4Percentile" ) - ) - .get(); + ) + ).get(); fail("Illegal bucketsPaths was 
provided but no exception was thrown."); } catch (Exception e) { @@ -866,26 +828,24 @@ public void testInlineScriptWithMultiValueAggregationIllegalBucketsPaths() { public void testInlineScriptWithMultiValueAggregation() { int percentile = 90; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME).percentiles(percentile)) - .subAggregation(percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(percentile)) - .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(percentile)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Percentile", - "field3Percentile", - "field4Percentile" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME).percentiles(percentile)) + .subAggregation(percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(percentile)) + .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(percentile)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Percentile", + "field3Percentile", + "field4Percentile" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); @@ -919,28 +879,24 @@ public void testInlineScriptWithMultiValueAggregationDifferentBucketsPaths() { int percentile10 = 10; int percentile50 = 50; int percentile90 = 90; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - histogram("histo").field(FIELD_1_NAME) - .interval(interval) - .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME)) - .subAggregation( - percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(percentile10, percentile50, percentile90) - ) - .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(percentile90)) - .subAggregation( - bucketScript( - "seriesArithmetic", - new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), - "field2Percentile.10", - "field3Percentile.50", - "field4Percentile" - ) + SearchResponse response = prepareSearch("idx").addAggregation( + histogram("histo").field(FIELD_1_NAME) + .interval(interval) + .subAggregation(percentiles("field2Percentile").field(FIELD_2_NAME)) + .subAggregation(percentiles("field3Percentile").field(FIELD_3_NAME).percentiles(percentile10, percentile50, percentile90)) + .subAggregation(percentiles("field4Percentile").field(FIELD_4_NAME).percentiles(percentile90)) + .subAggregation( + bucketScript( + "seriesArithmetic", + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_value0 + _value1 + _value2", Collections.emptyMap()), + "field2Percentile.10", + "field3Percentile.50", + "field4Percentile" ) - ) - .get(); + ) + ).get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 2950276e5c552..e5bece3c404ac 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -26,7 +26,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.extendedStatsBucket; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.notNullValue; @@ -100,11 +100,10 @@ public void setupSuiteScopeCluster() throws Exception { */ public void testGappyIndexWithSigma() { double sigma = randomDoubleBetween(1.0, 6.0, true); - SearchResponse response = client().prepareSearch("idx_gappy") - .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) + SearchResponse response = prepareSearch("idx_gappy").addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) .addAggregation(extendedStatsBucket("extended_stats_bucket", "histo>_count").sigma(sigma)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); assertThat(histo.getName(), equalTo("histo")); @@ -151,19 +150,17 @@ public void testGappyIndexWithSigma() { public void testBadSigmaAsSubAgg() throws Exception { Exception ex = expectThrows( Exception.class, - () -> client().prepareSearch("idx") - .addAggregation( - terms("terms").field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram("histo").field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").sigma(-1.0)) - ) - .get() + () -> prepareSearch("idx").addAggregation( + terms("terms").field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME) + .interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ) + .subAggregation(extendedStatsBucket("extended_stats_bucket", "histo>sum").sigma(-1.0)) + ).get() ); Throwable cause = ExceptionsHelper.unwrapCause(ex); if (cause == null) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 53ebbfc0bb016..4b41d71ff6e61 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -28,7 +28,7 @@ import static 
org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.percentilesBucket; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.core.IsNull.notNullValue; @@ -68,12 +68,11 @@ protected double getNestedMetric(PercentilesBucket bucket) { } public void testMetricTopLevelDefaultPercents() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) - .addAggregation(percentilesBucket("percentiles_bucket", termsName + ">sum")) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).addAggregation(percentilesBucket("percentiles_bucket", termsName + ">sum")).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -101,16 +100,13 @@ public void testMetricTopLevelDefaultPercents() throws Exception { } public void testWrongPercents() throws Exception { - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .includeExclude(new IncludeExclude(null, "tag.*", null, null)) - .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) - ) - .addAggregation(percentilesBucket("percentiles_bucket", termsName + ">sum").setPercents(PERCENTS)) - .get(); + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .includeExclude(new IncludeExclude(null, "tag.*", null, null)) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)) + ).addAggregation(percentilesBucket("percentiles_bucket", termsName + ">sum").setPercents(PERCENTS)).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); @@ -134,8 +130,7 @@ public void testBadPercents() throws Exception { double[] badPercents = { -1.0, 110.0 }; try { - client().prepareSearch("idx") - .addAggregation(terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + prepareSearch("idx").addAggregation(terms(termsName).field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) .addAggregation(percentilesBucket("percentiles_bucket", termsName + ">sum").setPercents(badPercents)) .get(); @@ -161,18 +156,16 @@ public void testBadPercents_asSubAgg() throws Exception { double[] badPercents = { -1.0, 110.0 }; try { - client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - ) - .subAggregation(percentilesBucket("percentiles_bucket", histoName + ">_count").setPercents(badPercents)) - ) - .get(); + prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) + 
.interval(interval) + .extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentiles_bucket", histoName + ">_count").setPercents(badPercents)) + ).get(); fail("Illegal percent's were provided but no exception was thrown."); } catch (Exception e) { @@ -194,21 +187,18 @@ public void testBadPercents_asSubAgg() throws Exception { public void testNestedWithDecimal() throws Exception { double[] percent = { 99.9 }; - SearchResponse response = client().prepareSearch("idx") - .addAggregation( - terms(termsName).field("tag") - .order(BucketOrder.key(true)) - .subAggregation( - histogram(histoName).field(SINGLE_VALUED_FIELD_NAME) - .interval(interval) - .extendedBounds(minRandomValue, maxRandomValue) - ) - .subAggregation(percentilesBucket("percentile_histo_bucket", histoName + ">_count").setPercents(percent)) - ) + SearchResponse response = prepareSearch("idx").addAggregation( + terms(termsName).field("tag") + .order(BucketOrder.key(true)) + .subAggregation( + histogram(histoName).field(SINGLE_VALUED_FIELD_NAME).interval(interval).extendedBounds(minRandomValue, maxRandomValue) + ) + .subAggregation(percentilesBucket("percentile_histo_bucket", histoName + ">_count").setPercents(percent)) + ) .addAggregation(percentilesBucket("percentile_terms_bucket", termsName + ">percentile_histo_bucket[99.9]").setPercents(percent)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get(termsName); assertThat(terms, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchRedStateIndexIT.java index a79c09a72f7df..74acaf95bd24a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchRedStateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchRedStateIndexIT.java @@ -43,7 +43,7 @@ public void testAllowPartialsWithRedState() throws Exception { final int numShards = cluster().numDataNodes() + 2; buildRedIndex(numShards); - SearchResponse searchResponse = client().prepareSearch().setSize(0).setAllowPartialSearchResults(true).get(); + SearchResponse searchResponse = prepareSearch().setSize(0).setAllowPartialSearchResults(true).get(); assertThat(RestStatus.OK, equalTo(searchResponse.status())); assertThat("Expect some shards failed", searchResponse.getFailedShards(), allOf(greaterThan(0), lessThanOrEqualTo(numShards))); assertThat("Expect no shards skipped", searchResponse.getSkippedShards(), equalTo(0)); @@ -60,7 +60,7 @@ public void testClusterAllowPartialsWithRedState() throws Exception { setClusterDefaultAllowPartialResults(true); - SearchResponse searchResponse = client().prepareSearch().setSize(0).get(); + SearchResponse searchResponse = prepareSearch().setSize(0).get(); assertThat(RestStatus.OK, equalTo(searchResponse.status())); assertThat("Expect some shards failed", searchResponse.getFailedShards(), allOf(greaterThan(0), lessThanOrEqualTo(numShards))); assertThat("Expect no shards skipped", searchResponse.getSkippedShards(), equalTo(0)); @@ -79,7 +79,7 @@ public void testDisallowPartialsWithRedState() throws Exception { SearchPhaseExecutionException ex = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch().setSize(0).setAllowPartialSearchResults(false).get() + () -> prepareSearch().setSize(0).setAllowPartialSearchResults(false).get() ); assertThat(ex.getDetailedMessage(), 
containsString("Search rejected due to missing shard")); } @@ -88,10 +88,7 @@ public void testClusterDisallowPartialsWithRedState() throws Exception { buildRedIndex(cluster().numDataNodes() + 2); setClusterDefaultAllowPartialResults(false); - SearchPhaseExecutionException ex = expectThrows( - SearchPhaseExecutionException.class, - () -> client().prepareSearch().setSize(0).get() - ); + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch().setSize(0).get()); assertThat(ex.getDetailedMessage(), containsString("Search rejected due to missing shard")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 5f42ac690fdde..eb6dd2f0767f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -64,7 +64,7 @@ private void searchWhileCreatingIndex(boolean createIndex, int numberOfReplicas) ClusterHealthStatus status = clusterAdmin().prepareHealth("test").get().getStatus(); while (status != ClusterHealthStatus.GREEN) { // first, verify that search normal search works - assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "test")), 1); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "test")), 1); Client client = client(); SearchResponse searchResponse = client.prepareSearch("test") .setPreference(preference + Integer.toString(counter++)) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index a4a5765926a3c..24df07217a5a2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -61,7 +61,7 @@ private void testSearchAndRelocateConcurrently(final int numberOfReplicas) throw ); } indexRandom(true, indexBuilders.toArray(new IndexRequestBuilder[indexBuilders.size()])); - assertHitCount(client().prepareSearch(), (numDocs)); + assertHitCount(prepareSearch(), (numDocs)); final int numIters = scaledRandomIntBetween(5, 20); for (int i = 0; i < numIters; i++) { final AtomicBoolean stop = new AtomicBoolean(false); @@ -74,7 +74,7 @@ private void testSearchAndRelocateConcurrently(final int numberOfReplicas) throw public void run() { try { while (stop.get() == false) { - SearchResponse sr = client().prepareSearch().setSize(numDocs).get(); + SearchResponse sr = prepareSearch().setSize(numDocs).get(); if (sr.getHits().getTotalHits().value != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. 
this can happen if the cluster-state is behind when the @@ -134,7 +134,7 @@ public void run() { if (nonCriticalExceptions.isEmpty() == false) { logger.info("non-critical exceptions: {}", nonCriticalExceptions); for (int j = 0; j < 10; j++) { - assertHitCount(client().prepareSearch(), numDocs); + assertHitCount(prepareSearch(), numDocs); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 519e839c5d322..6f701e956788b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -131,8 +131,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe int docToQuery = between(0, numDocs - 1); int expectedResults = added[docToQuery] ? 1 : 0; logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery)); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))) + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))) .setSize(expectedResults) .get(); logger.info("Successful shards: [{}] numShards: [{}]", searchResponse.getSuccessfulShards(), test.numPrimaries); @@ -140,8 +139,7 @@ public void testRandomExceptions() throws IOException, InterruptedException, Exe assertResultsAndLogOnFailure(expectedResults, searchResponse); } // check match all - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchAllQuery()) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchAllQuery()) .setSize(numCreated) .addSort("_id", SortOrder.ASC) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java index dba26d0560a14..54ad0cd7e0cff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWithRandomIOExceptionsIT.java @@ -32,8 +32,7 @@ import java.util.Collection; import java.util.concurrent.ExecutionException; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; public class SearchWithRandomIOExceptionsIT extends ESIntegTestCase { @@ -155,8 +154,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc int docToQuery = between(0, numDocs - 1); int expectedResults = added[docToQuery] ? 
1 : 0; logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery)); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))) + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery))) .setSize(expectedResults) .get(); logger.info("Successful shards: [{}] numShards: [{}]", searchResponse.getSuccessfulShards(), numShards.numPrimaries); @@ -164,8 +162,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc assertResultsAndLogOnFailure(expectedResults, searchResponse); } // check match all - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchAllQuery()) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchAllQuery()) .setSize(numCreated + numInitialDocs) .addSort("_uid", SortOrder.ASC) .get(); @@ -193,9 +190,7 @@ public void testRandomDirectoryIOExceptions() throws IOException, InterruptedExc indicesAdmin().prepareClose("test").execute().get(); indicesAdmin().prepareOpen("test").execute().get(); ensureGreen(); - SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, numInitialDocs); + assertHitCountAndNoFailures(prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")), numInitialDocs); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 8ea750c624872..54cff6efe3d17 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -116,8 +116,7 @@ public void testDfsQueryThenFetch() throws Exception { refresh(); int total = 0; - SearchResponse searchResponse = client().prepareSearch("test") - .setSearchType(DFS_QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH) .setQuery(termQuery("multi", "test")) .setSize(60) .setExplain(true) @@ -157,8 +156,7 @@ public void testDfsQueryThenFetchWithSort() throws Exception { prepareData(); int total = 0; - SearchResponse searchResponse = client().prepareSearch("test") - .setSearchType(DFS_QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH) .setQuery(termQuery("multi", "test")) .setSize(60) .setExplain(true) @@ -195,8 +193,7 @@ public void testQueryThenFetch() throws Exception { prepareData(); int total = 0; - SearchResponse searchResponse = client().prepareSearch("test") - .setSearchType(QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch("test").setSearchType(QUERY_THEN_FETCH) .setQuery(termQuery("multi", "test")) .setSize(60) .setExplain(true) @@ -255,8 +252,7 @@ public void testQueryThenFetchWithSort() throws Exception { prepareData(); int total = 0; - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termQuery("multi", "test")) + SearchResponse searchResponse = prepareSearch("test").setQuery(termQuery("multi", "test")) .setSize(60) .setExplain(true) .addSort("age", SortOrder.ASC) @@ -357,9 +353,9 @@ public void testFailedMultiSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed multi search with a wrong 
query"); MultiSearchResponse response = client().prepareMultiSearch() - .add(client().prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz"))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz"))) + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) + .add(prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .get(); assertThat(response.getResponses().length, equalTo(3)); assertThat(response.getResponses()[0].getFailureMessage(), notNullValue()); @@ -381,16 +377,15 @@ public void testFailedMultiSearchWithWrongQueryWithFunctionScore() throws Except MultiSearchResponse response = client().prepareMultiSearch() // Add custom score query with bogus script .add( - client().prepareSearch("test") - .setQuery( - QueryBuilders.functionScoreQuery( - QueryBuilders.termQuery("nid", 1), - new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, "bar", "foo", Collections.emptyMap())) - ) + prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery( + QueryBuilders.termQuery("nid", 1), + new ScriptScoreFunctionBuilder(new Script(ScriptType.INLINE, "bar", "foo", Collections.emptyMap())) ) + ) ) - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) + .add(prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .get(); assertThat(response.getResponses().length, equalTo(3)); assertThat(response.getResponses()[0].getFailureMessage(), notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index a5dad457005cb..b600098d82b33 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -39,7 +39,7 @@ import java.util.Objects; import static java.util.Collections.singletonList; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.CoreMatchers.equalTo; @@ -72,10 +72,10 @@ public void testPlugin() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse response = client().prepareSearch() - .setSource(new SearchSourceBuilder().ext(Collections.singletonList(new TermVectorsFetchBuilder("test")))) - .get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch().setSource( + new SearchSourceBuilder().ext(Collections.singletonList(new TermVectorsFetchBuilder("test"))) + ).get(); + assertNoFailures(response); assertThat( ((Map) response.getHits().getAt(0).field("term_vectors_fetch").getValues().get(0)).get("i"), equalTo(2) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 4041bbc431f75..00c5342577231 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -50,9 +50,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHit; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -141,11 +142,9 @@ public void testSimpleNested() throws Exception { ); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) - ) - .get(); + SearchResponse response = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("1")); @@ -160,11 +159,9 @@ public void testSimpleNested() throws Exception { assertThat(innerHits.getAt(1).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(1).getNestedIdentity().getOffset(), equalTo(1)); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) - ) - .get(); + response = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "elephant"), ScoreMode.Avg).innerHit(new InnerHitBuilder("comment")) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("2")); @@ -183,17 +180,15 @@ public void testSimpleNested() throws Exception { assertThat(innerHits.getAt(2).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(2).getNestedIdentity().getOffset(), equalTo(2)); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) - .setExplain(true) - .addFetchField("comments.mes*") - .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) - .setSize(1) - ) + response = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message")) + .setExplain(true) + .addFetchField("comments.mes*") + .addScriptField("script", new Script(ScriptType.INLINE, MockScriptEngine.NAME, "5", Collections.emptyMap())) + 
.setSize(1) ) - .get(); + ).get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); assertThat(innerHits.getTotalHits().value, equalTo(2L)); @@ -209,13 +204,11 @@ public void testSimpleNested() throws Exception { ); assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5")); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().addDocValueField("comments.mes*").setSize(1) - ) + response = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().addDocValueField("comments.mes*").setSize(1) ) - .get(); + ).get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); assertThat(innerHits.getHits().length, equalTo(1)); @@ -257,11 +250,7 @@ public void testRandomNested() throws Exception { new InnerHitBuilder("b").addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size) ) ); - SearchResponse searchResponse = client().prepareSearch("idx") - .setQuery(boolQuery) - .setSize(numDocs) - .addSort("foo", SortOrder.ASC) - .get(); + SearchResponse searchResponse = prepareSearch("idx").setQuery(boolQuery).setSize(numDocs).addSort("foo", SortOrder.ASC).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, numDocs); @@ -370,17 +359,15 @@ public void testNestedMultipleLayers() throws Exception { indexRandom(true, requests); // Check we can load the first doubly-nested document. - SearchResponse response = client().prepareSearch("articles") - .setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ) - .get(); + SearchResponse response = prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("1")); @@ -401,17 +388,15 @@ public void testNestedMultipleLayers() throws Exception { assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); // Check we can load the second doubly-nested document. 
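// For illustration, a minimal sketch of the pattern this file's hunks converge on:
// ESIntegTestCase.prepareSearch(...) in place of client().prepareSearch(...), with
// assertNoFailures(...) standing in for the removed assertSearchResponse(...).
// Assumes the ESIntegTestCase harness and InnerHitsIT's static imports; the local
// name "sketch" is hypothetical, not part of the patch.
SearchResponse sketch = prepareSearch("articles").setQuery(
    nestedQuery(
        "comments",
        nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder("remark")),
        ScoreMode.Avg
    ).innerHit(new InnerHitBuilder())
).get();
assertNoFailures(sketch);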
- response = client().prepareSearch("articles") - .setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ) - .get(); + response = prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "neutral"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("1")); @@ -432,13 +417,9 @@ public void testNestedMultipleLayers() throws Exception { assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); // Directly refer to the second level: - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit( - new InnerHitBuilder() - ) - ) - .get(); + response = prepareSearch("articles").setQuery( + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("2")); @@ -452,17 +433,15 @@ public void testNestedMultipleLayers() throws Exception { assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getField().string(), equalTo("remarks")); assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder()) - ) - .get(); + response = prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertSearchHit(response, 1, hasId("2")); @@ -483,8 +462,7 @@ public void testNestedMultipleLayers() throws Exception { assertThat(innerHits.getAt(0).getNestedIdentity().getChild().getOffset(), equalTo(0)); // Check that inner hits contain _source even when it's disabled on the parent request. 
- response = client().prepareSearch("articles") - .setFetchSource(false) + response = prepareSearch("articles").setFetchSource(false) .setQuery( nestedQuery( "comments", @@ -501,17 +479,15 @@ public void testNestedMultipleLayers() throws Exception { assertNotNull(innerHits.getAt(0).getSourceAsMap()); assertFalse(innerHits.getAt(0).getSourceAsMap().isEmpty()); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery( - "comments", - nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( - new InnerHitBuilder("remark") - ), - ScoreMode.Avg - ).innerHit(new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)) - ) - .get(); + response = prepareSearch("articles").setQuery( + nestedQuery( + "comments", + nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "good"), ScoreMode.Avg).innerHit( + new InnerHitBuilder("remark") + ), + ScoreMode.Avg + ).innerHit(new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)) + ).get(); assertNoFailures(response); innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); @@ -538,9 +514,9 @@ public void testNestedDefinedAsObject() throws Exception { ); indexRandom(true, requests); - SearchResponse response = client().prepareSearch("articles") - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder())) - .get(); + SearchResponse response = prepareSearch("articles").setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); @@ -607,13 +583,11 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { ); indexRandom(true, requests); - SearchResponse resp1 = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.FETCH_SOURCE) - ) + SearchResponse resp1 = prepareSearch("articles").setQuery( + nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.FETCH_SOURCE) ) - .get(); + ).get(); assertNoFailures(resp1); assertHitCount(resp1, 1); SearchHit parent = resp1.getHits().getAt(0); @@ -623,13 +597,11 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); - SearchResponse response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) - ) + SearchResponse response = prepareSearch("articles").setQuery( + nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); SearchHit hit = response.getHits().getAt(0); @@ 
-645,13 +617,11 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { assertThat(messages.getAt(1).getNestedIdentity().getOffset(), equalTo(0)); assertThat(messages.getAt(1).getNestedIdentity().getChild(), nullValue()); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) - ) + response = prepareSearch("articles").setQuery( + nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); hit = response.getHits().getAt(0); @@ -680,13 +650,11 @@ public void testInnerHitsWithObjectFieldThatHasANestedField() throws Exception { ) ); indexRandom(true, requests); - response = client().prepareSearch("articles") - .setQuery( - nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) - ) + response = prepareSearch("articles").setQuery( + nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); hit = response.getHits().getAt(0); @@ -792,11 +760,7 @@ public void testMatchesQueriesNestedInnerHits() throws Exception { query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit( new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC)) ); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(query) - .setSize(numDocs) - .addSort("field1", SortOrder.ASC) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC).get(); assertNoFailures(searchResponse); assertAllSuccessful(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)); @@ -848,13 +812,11 @@ public void testNestedSource() throws Exception { // the field name (comments.message) used for source filtering should be the same as when using that field for // other features (like in the query dsl or aggs) in order for consistency: - SearchResponse response = client().prepareSearch() - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit( - new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.message" }, null)) - ) + SearchResponse response = prepareSearch().setQuery( + nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.message" }, null)) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -870,9 +832,9 @@ public void testNestedSource() throws Exception { equalTo("fox ate rabbit x y z") ); - response = client().prepareSearch() - .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())) - .get(); + response = prepareSearch().setQuery( + nestedQuery("comments", 
matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder()) + ).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -890,23 +852,18 @@ public void testNestedSource() throws Exception { // Source filter on a field that does not exist inside the nested document and just check that we do not fail and // return an empty _source: - response = client().prepareSearch() - .setQuery( - nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None).innerHit( - new InnerHitBuilder().setFetchSourceContext( - FetchSourceContext.of(true, new String[] { "comments.missing_field" }, null) - ) - ) + response = prepareSearch().setQuery( + nestedQuery("comments", matchQuery("comments.message", "away"), ScoreMode.None).innerHit( + new InnerHitBuilder().setFetchSourceContext(FetchSourceContext.of(true, new String[] { "comments.missing_field" }, null)) ) - .get(); + ).get(); assertNoFailures(response); assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); // Check that inner hits contain _source even when it's disabled on the root request. - response = client().prepareSearch() - .setFetchSource(false) + response = prepareSearch().setFetchSource(false) .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())) .get(); assertNoFailures(response); @@ -922,17 +879,16 @@ public void testInnerHitsWithIgnoreUnmapped() throws Exception { client().prepareIndex("index2").setId("3").setSource("key", "value").get(); refresh(); - SearchResponse response = client().prepareSearch("index1", "index2") - .setQuery( + assertSearchHitsWithoutFailures( + prepareSearch("index1", "index2").setQuery( boolQuery().should( nestedQuery("nested_type", matchAllQuery(), ScoreMode.None).ignoreUnmapped(true) .innerHit(new InnerHitBuilder().setIgnoreUnmapped(true)) ).should(termQuery("key", "value")) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 2); - assertSearchHits(response, "1", "3"); + ), + "1", + "3" + ); } public void testUseMaxDocInsteadOfSize() throws Exception { @@ -952,9 +908,7 @@ public void testUseMaxDocInsteadOfSize() throws Exception { QueryBuilder query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1) ); - SearchResponse response = client().prepareSearch("index2").setQuery(query).get(); - assertNoFailures(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(prepareSearch("index2").setQuery(query), 1); } public void testTooHighResultWindow() throws Exception { @@ -966,25 +920,22 @@ public void testTooHighResultWindow() throws Exception { ) .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch("index2") - .setQuery( + assertHitCountAndNoFailures( + prepareSearch("index2").setQuery( nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( new InnerHitBuilder().setFrom(50).setSize(10).setName("_name") ) - ) - .get(); - assertNoFailures(response); - assertHitCount(response, 1); + ), + 1 + ); Exception e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("index2") - .setQuery( - nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( - new 
InnerHitBuilder().setFrom(100).setSize(10).setName("_name") - ) + () -> prepareSearch("index2").setQuery( + nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFrom(100).setSize(10).setName("_name") ) - .get() + ).get() ); assertThat( e.getCause().getMessage(), @@ -992,13 +943,11 @@ public void testTooHighResultWindow() throws Exception { ); e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("index2") - .setQuery( - nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFrom(10).setSize(100).setName("_name") - ) + () -> prepareSearch("index2").setQuery( + nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFrom(10).setSize(100).setName("_name") ) - .get() + ).get() ); assertThat( e.getCause().getMessage(), @@ -1007,20 +956,18 @@ public void testTooHighResultWindow() throws Exception { updateIndexSettings(Settings.builder().put(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), 110), "index2"); assertNoFailures( - client().prepareSearch("index2") - .setQuery( - nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFrom(100).setSize(10).setName("_name") - ) + prepareSearch("index2").setQuery( + nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFrom(100).setSize(10).setName("_name") ) + ) ); assertNoFailures( - client().prepareSearch("index2") - .setQuery( - nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( - new InnerHitBuilder().setFrom(10).setSize(100).setName("_name") - ) + prepareSearch("index2").setQuery( + nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit( + new InnerHitBuilder().setFrom(10).setSize(100).setName("_name") ) + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java index c0bbbb48c09a2..d7347ef21328f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java @@ -46,15 +46,12 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { client().prepareIndex("test").setId("3").setSource("name", "test3", "number", 3).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(matchAllQuery()) - .filter( - boolQuery().should(rangeQuery("number").lt(2).queryName("test1")) - .should(rangeQuery("number").gte(2).queryName("test2")) - ) - ) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + boolQuery().must(matchAllQuery()) + .filter( + boolQuery().should(rangeQuery("number").lt(2).queryName("test1")).should(rangeQuery("number").gte(2).queryName("test2")) + ) + ).get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("3") || hit.getId().equals("2")) { @@ -70,11 +67,9 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { } } - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2")) - ) - 
.get(); + searchResponse = prepareSearch().setQuery( + boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2")) + ).get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1") || hit.getId().equals("2")) { @@ -100,8 +95,7 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { client().prepareIndex("test").setId("3").setSource("name", "test").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) ) @@ -123,8 +117,7 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { } } - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().should(termQuery("name", "test").queryName("name")).should(termQuery("title", "title1").queryName("title")) ) @@ -157,10 +150,9 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex client().prepareIndex("test").setId("3").setSource("name", "test", "title", "title3").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", "title3").queryName("title"))) - .setPostFilter(termQuery("name", "test").queryName("name")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + boolQuery().must(matchAllQuery()).filter(termsQuery("title", "title1", "title2", "title3").queryName("title")) + ).setPostFilter(termQuery("name", "test").queryName("name")).get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { @@ -174,8 +166,7 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex } } - searchResponse = client().prepareSearch() - .setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title")) + searchResponse = prepareSearch().setQuery(termsQuery("title", "title1", "title2", "title3").queryName("title")) .setPostFilter(matchQuery("name", "test").queryName("name")) .get(); assertHitCount(searchResponse, 3L); @@ -199,9 +190,7 @@ public void testRegExpQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")).get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { @@ -222,9 +211,7 @@ public void testPrefixQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")).get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { @@ -245,9 +232,7 @@ 
public void testFuzzyQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.fuzzyQuery("title", "titel1").queryName("fuzzy")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.fuzzyQuery("title", "titel1").queryName("fuzzy")).get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { @@ -268,9 +253,7 @@ public void testWildcardQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard")).get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { @@ -291,9 +274,9 @@ public void testSpanFirstQuerySupportsName() { client().prepareIndex("test1").setId("1").setSource("title", "title1 title2").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.spanFirstQuery(QueryBuilders.spanTermQuery("title", "title1"), 10).queryName("span")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + QueryBuilders.spanFirstQuery(QueryBuilders.spanTermQuery("title", "title1"), 10).queryName("span") + ).get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { @@ -321,13 +304,11 @@ public void testMatchedWithShould() throws Exception { // Execute search at least two times to load it in cache int iter = scaledRandomIntBetween(2, 10); for (int i = 0; i < iter; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery( - boolQuery().minimumShouldMatch(1) - .should(queryStringQuery("dolor").queryName("dolor")) - .should(queryStringQuery("elit").queryName("elit")) - ) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + boolQuery().minimumShouldMatch(1) + .should(queryStringQuery("dolor").queryName("dolor")) + .should(queryStringQuery("elit").queryName("elit")) + ).get(); assertHitCount(searchResponse, 2L); for (SearchHit hit : searchResponse.getHits()) { @@ -359,7 +340,7 @@ public void testMatchedWithWrapperQuery() throws Exception { BytesReference termBytes = XContentHelper.toXContent(termQueryBuilder, XContentType.JSON, false); QueryBuilder[] queries = new QueryBuilder[] { wrapperQuery(matchBytes), constantScoreQuery(wrapperQuery(termBytes)) }; for (QueryBuilder query : queries) { - SearchResponse searchResponse = client().prepareSearch().setQuery(query).get(); + SearchResponse searchResponse = prepareSearch().setQuery(query).get(); assertHitCount(searchResponse, 1L); SearchHit hit = searchResponse.getHits().getAt(0); assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); @@ -376,8 +357,7 @@ public void testMatchedWithRescoreQuery() throws Exception { client().prepareIndex("test").setId("2").setSource("content", "hello you").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(new MatchAllQueryBuilder().queryName("all")) + SearchResponse searchResponse = prepareSearch().setQuery(new MatchAllQueryBuilder().queryName("all")) .setRescorer( new QueryRescorerBuilder(new MatchPhraseQueryBuilder("content", "hello you").boost(10).queryName("rescore_phrase")) ) diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java index 8d340b8401b60..6b790f9e6f090 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java @@ -45,8 +45,7 @@ protected void setup() throws Exception { } public void testThatCustomHighlightersAreSupported() throws IOException { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom")) .get(); assertHighlight(searchResponse, 0, "name", 0, equalTo("standard response for name at position 1")); @@ -59,8 +58,7 @@ public void testThatCustomHighlighterCanBeConfiguredPerField() throws Exception options.put("myFieldOption", "someValue"); highlightConfig.options(options); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .highlighter(new HighlightBuilder().field(highlightConfig)) .get(); @@ -72,8 +70,7 @@ public void testThatCustomHighlighterCanBeConfiguredGlobally() throws Exception Map<String, Object> options = new HashMap<>(); options.put("myGlobalOption", "someValue"); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options)) .get(); @@ -82,8 +79,9 @@ public void testThatCustomHighlighterCanBeConfiguredGlobally() throws Exception } public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary"))) + SearchResponse searchResponse = prepareSearch("test").setQuery( + QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary")) + ) .highlighter( new HighlightBuilder().highlighterType("test-custom") .field("name") diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 9da65214599e1..79a28a053b3c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -65,6 +65,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.concurrent.ExecutionException; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; @@ -91,9 +92,9 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNotHighlighted; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -138,8 +139,7 @@ public void testHighlightingWithKeywordIgnoreBoundaryScanner() throws IOExceptio refresh(); for (BoundaryScannerType scanner : BoundaryScannerType.values()) { - SearchResponse search = client().prepareSearch() - .addSort(SortBuilders.fieldSort("sort")) + SearchResponse search = prepareSearch().addSort(SortBuilders.fieldSort("sort")) .setQuery(matchQuery("tags", "foo bar")) .highlighter(new HighlightBuilder().field(new Field("tags")).numOfFragments(2).boundaryScannerType(scanner)) .get(); @@ -164,8 +164,7 @@ public void testHighlightingWithStoredKeyword() throws IOException { assertAcked(prepareCreate("test").setMapping(mappings)); client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "foo").endObject()).get(); refresh(); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("text", "foo")) + SearchResponse search = prepareSearch().setQuery(matchQuery("text", "foo")) .highlighter(new HighlightBuilder().field(new Field("text"))) .get(); assertHighlight(search, 0, "text", 0, equalTo("foo")); @@ -190,8 +189,7 @@ public void testHighlightingWithWildcardName() throws IOException { client().prepareIndex("test").setId("1").setSource(jsonBuilder().startObject().field("text", "text").endObject()).get(); refresh(); for (String type : ALL_TYPES) { - SearchResponse search = client().prepareSearch() - .setQuery(constantScoreQuery(matchQuery("text", "text"))) + SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))) .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type))) .get(); assertHighlight(search, 0, "text", 0, equalTo("text")); @@ -222,7 +220,7 @@ public void testFieldAlias() throws IOException { for (String type : ALL_TYPES) { HighlightBuilder builder = new HighlightBuilder().field(new Field("alias").highlighterType(type)) .requireFieldMatch(randomBoolean()); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get(); + SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get(); assertHighlight(search, 0, "alias", 0, equalTo("foo")); } } @@ -252,7 +250,7 @@ public void testFieldAliasWithSourceLookup() throws IOException { for (String type : ALL_TYPES) { HighlightBuilder builder = new HighlightBuilder().field(new Field("alias").highlighterType(type)) .requireFieldMatch(randomBoolean()); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("alias", "bar")).highlighter(builder).get(); + SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "bar")).highlighter(builder).get(); assertHighlight(search, 0, "alias", 0, equalTo("foo bar")); } } @@ -277,7 +275,7 @@ public void testFieldAliasWithWildcardField() throws IOException { refresh(); 
HighlightBuilder builder = new HighlightBuilder().field(new Field("al*")).requireFieldMatch(false); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get(); + SearchResponse search = prepareSearch().setQuery(matchQuery("alias", "foo")).highlighter(builder).get(); assertHighlight(search, 0, "alias", 0, equalTo("foo")); } @@ -311,13 +309,11 @@ public void testHighlightingWhenFieldsAreNotStoredThereIsNoSource() throws IOExc .get(); refresh(); for (String type : ALL_TYPES) { - SearchResponse search = client().prepareSearch() - .setQuery(constantScoreQuery(matchQuery("text", "text"))) + SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))) .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(type))) .get(); assertHighlight(search, 0, "text", 0, equalTo("text")); - search = client().prepareSearch() - .setQuery(constantScoreQuery(matchQuery("text", "text"))) + search = prepareSearch().setQuery(constantScoreQuery(matchQuery("text", "text"))) .highlighter(new HighlightBuilder().field(new Field("unstored_text"))) .get(); assertNoFailures(search); @@ -334,8 +330,7 @@ public void testHighTermFrequencyDoc() throws IOException { } client().prepareIndex("test").setId("1").setSource("name", builder.toString()).get(); refresh(); - SearchResponse search = client().prepareSearch() - .setQuery(constantScoreQuery(matchQuery("name", "abc"))) + SearchResponse search = prepareSearch().setQuery(constantScoreQuery(matchQuery("name", "abc"))) .highlighter(new HighlightBuilder().field("name")) .get(); assertHighlight(search, 0, "name", 0, startsWith("abc abc abc abc")); @@ -361,24 +356,33 @@ public void testEnsureNoNegativeOffsets() throws Exception { ) .get(); refresh(); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed")) - .highlighter(new HighlightBuilder().field("long_term", 18, 1).highlighterType("fvh")) - .get(); - assertHighlight(search, 0, "long_term", 0, 1, equalTo("thisisaverylongwordandmakessurethisfails")); - - search = client().prepareSearch() - .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) - .get(); - assertNotHighlighted(search, 0, "no_long_term"); + assertHighlight( + prepareSearch().setQuery(matchQuery("long_term", "thisisaverylongwordandmakessurethisfails foo highlighed")) + .highlighter(new HighlightBuilder().field("long_term", 18, 1).highlighterType("fvh")), + 0, + "long_term", + 0, + 1, + equalTo("thisisaverylongwordandmakessurethisfails") + ); - search = client().prepareSearch() - .setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) - .highlighter(new HighlightBuilder().field("no_long_term", 30, 1).highlighterType("fvh").postTags("").preTags("")) - .get(); + assertNotHighlighted( + prepareSearch().setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) + .highlighter(new HighlightBuilder().field("no_long_term", 18, 1).highlighterType("fvh").postTags("").preTags("")) + .get(), + 0, + "no_long_term" + ); - assertHighlight(search, 0, "no_long_term", 0, 1, equalTo("a test where foo is highlighed and")); + assertHighlight( + prepareSearch().setQuery(matchPhraseQuery("no_long_term", "test foo highlighed").slop(3)) + .highlighter(new HighlightBuilder().field("no_long_term", 30, 
1).highlighterType("fvh").postTags("").preTags("")), + 0, + "no_long_term", + 0, + 1, + equalTo("a test where foo is highlighed and") + ); } public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception { @@ -429,8 +433,7 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) .highlighter(new HighlightBuilder().field("title", -1, 0)) .get(); @@ -438,8 +441,7 @@ public void testSourceLookupHighlightingUsingPlainHighlighter() throws Exception assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); } - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) + search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) .highlighter(new HighlightBuilder().field("attachments.body", -1, 0)) .get(); @@ -498,8 +500,7 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) .highlighter(new HighlightBuilder().field("title", -1, 0)) .get(); @@ -507,8 +508,7 @@ public void testSourceLookupHighlightingUsingFastVectorHighlighter() throws Exce assertHighlight(search, i, "title", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); } - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) + search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) .execute() .get(); @@ -571,8 +571,7 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) // asking for the whole field to be highlighted .highlighter(new HighlightBuilder().field("title", -1, 0)) .get(); @@ -588,8 +587,7 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except assertHighlight(search, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); } - search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + search = prepareSearch().setQuery(matchQuery("title", "bug")) // sentences will be generated out of each value .highlighter(new HighlightBuilder().field("title")) .get(); @@ -605,8 +603,7 @@ public void testSourceLookupHighlightingUsingPostingsHighlighter() throws Except assertHighlight(search, i, "title", 1, 2, equalTo("This is the second bug to perform highlighting on.")); } - search = client().prepareSearch() - .setQuery(matchQuery("attachments.body", "attachment")) + search = prepareSearch().setQuery(matchQuery("attachments.body", "attachment")) .highlighter(new HighlightBuilder().field("attachments.body", -1, 2)) .get(); @@ -634,8 +631,7 @@ public void testHighlightIssue1994() throws Exception { client().prepareIndex("test").setId("2").setSource("titleTV", new String[] { "some text to highlight", "highlight other text" }) ); - SearchResponse search = client().prepareSearch() - 
.setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) .highlighter(new HighlightBuilder().field("title", -1, 2).field("titleTV", -1, 2).requireFieldMatch(false)) .get(); @@ -644,8 +640,7 @@ public void testHighlightIssue1994() throws Exception { assertHighlight(search, 0, "titleTV", 0, equalTo("This is a test on the highlighting bug present in elasticsearch")); assertHighlight(search, 0, "titleTV", 1, 2, equalTo("The bug is bugging us")); - search = client().prepareSearch() - .setQuery(matchQuery("titleTV", "highlight")) + search = prepareSearch().setQuery(matchQuery("titleTV", "highlight")) .highlighter(new HighlightBuilder().field("titleTV", -1, 2)) .get(); @@ -684,7 +679,7 @@ public void testGlobalHighlightingSettingsOverriddenAtFieldLevel() { ) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight(searchResponse, 0, "field1", 0, 2, equalTo("test")); assertHighlight(searchResponse, 0, "field1", 1, 2, equalTo("test")); @@ -745,10 +740,7 @@ public void testPlainHighlighter() throws Exception { SearchSourceBuilder source = searchSource().query(termQuery("field1", "test")) .highlighter(highlight().highlighterType("plain").field("field1").order("score").preTags("").postTags("")); - - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(prepareSearch("test").setSource(source), 0, "field1", 0, 1, equalTo("this is a test")); } public void testPlainHighlighterOrder() throws Exception { @@ -764,7 +756,7 @@ public void testPlainHighlighterOrder() throws Exception { SearchSourceBuilder source = searchSource().query(matchQuery("field1", "brown dog")) .highlighter(highlight().highlighterType("plain").field("field1").preTags("").postTags("").fragmentSize(25)); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo("The quick brown fox")); assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); @@ -776,7 +768,7 @@ public void testPlainHighlighterOrder() throws Exception { highlight().highlighterType("plain").field("field1").order("none").preTags("").postTags("").fragmentSize(25) ); - searchResponse = client().prepareSearch("test").setSource(source).get(); + searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo("The quick brown fox")); assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo(" jumps over the lazy brown dog")); @@ -789,7 +781,7 @@ public void testPlainHighlighterOrder() throws Exception { highlight().highlighterType("plain").order("score").field("field1").preTags("").postTags("").fragmentSize(25) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight(searchResponse, 0, "field1", 0, 3, equalTo(" jumps over the lazy brown dog")); assertHighlight(searchResponse, 0, "field1", 1, 3, equalTo("The quick brown fox")); @@ -810,25 +802,19 @@ public void testFastVectorHighlighter() throws Exception { SearchSourceBuilder source = 
searchSource().query(termQuery("field1", "test")) .highlighter(highlight().field("field1", 100, 0).order("score").preTags("").postTags("")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(prepareSearch("test").setSource(source), 0, "field1", 0, 1, equalTo("this is a test")); logger.info("--> searching with boundary characters"); source = searchSource().query(matchQuery("field2", "quick")) .highlighter(highlight().field("field2", 30, 1).boundaryChars(new char[] { ' ' })); - searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over")); + assertHighlight(prepareSearch("test").setSource(source), 0, "field2", 0, 1, equalTo("The quick brown fox jumps over")); logger.info("--> searching with boundary characters on the field"); source = searchSource().query(matchQuery("field2", "quick")) .highlighter(highlight().field(new Field("field2").fragmentSize(30).numOfFragments(1).boundaryChars(new char[] { ' ' }))); - searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over")); + assertHighlight(prepareSearch("test").setSource(source), 0, "field2", 0, 1, equalTo("The quick brown fox jumps over")); } public void testHighlighterWithSentenceBoundaryScanner() throws Exception { @@ -850,7 +836,7 @@ public void testHighlighterWithSentenceBoundaryScanner() throws Exception { .postTags("") .boundaryScannerType(BoundaryScannerType.SENTENCE) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight( searchResponse, @@ -893,7 +879,7 @@ public void testHighlighterWithSentenceBoundaryScannerAndLocale() throws Excepti .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag()) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHighlight( searchResponse, @@ -932,10 +918,8 @@ public void testHighlighterWithWordBoundaryScanner() throws Exception { .boundaryScannerType(BoundaryScannerType.WORD) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field1", 0, @@ -962,10 +946,8 @@ public void testHighlighterWithWordBoundaryScannerAndLocale() throws Exception { .boundaryScannerLocale(Locale.ENGLISH.toLanguageTag()) ); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field1", 0, @@ -1086,7 +1068,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .fragmentSize(25) .highlighterType("fvh") .requireFieldMatch(requireFieldMatch); - SearchRequestBuilder req = client().prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); + SearchRequestBuilder req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); // First check highlighting without any matched fields set SearchResponse resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get(); @@ -1104,7 +1086,7 @@ private void 
checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .highlighterType("fvh") .requireFieldMatch(requireFieldMatch); fooField.matchedFields("foo", "foo.plain"); - req = client().prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); + req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); resp = req.setQuery(queryStringQuery("running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); @@ -1119,7 +1101,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .highlighterType("fvh") .requireFieldMatch(requireFieldMatch); fooField.matchedFields("foo.plain"); - req = client().prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); + req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); resp = req.setQuery(queryStringQuery("foo.plain:running scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); @@ -1130,7 +1112,7 @@ private void checkMatchedFieldsCase(boolean requireFieldMatch) throws Exception .highlighterType("fvh") .requireFieldMatch(requireFieldMatch); fooField.matchedFields("foo", "foo.plain"); - req = client().prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); + req = prepareSearch("test").highlighter(new HighlightBuilder().field(fooField)); resp = req.setQuery(queryStringQuery("foo.plain:running^5 scissors").field("foo")).get(); assertHighlight(resp, 0, "foo", 0, equalTo("running with scissors")); @@ -1226,8 +1208,7 @@ public void testFastVectorHighlighterManyDocs() throws Exception { indexRandom(true, indexRequestBuilders); logger.info("--> searching explicitly on field1 and highlighting on it"); - SearchResponse searchResponse = client().prepareSearch() - .setSize(COUNT) + SearchResponse searchResponse = prepareSearch().setSize(COUNT) .setQuery(termQuery("field1", "test")) .highlighter(new HighlightBuilder().field("field1", 100, 0)) .get(); @@ -1267,8 +1248,7 @@ public void testSameContent() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) .highlighter(new HighlightBuilder().field("title", -1, 0)) .get(); @@ -1288,8 +1268,7 @@ public void testFastVectorHighlighterOffsetParameter() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "bug")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "bug")) .highlighter(new HighlightBuilder().field("title", 30, 1, 10).highlighterType("fvh")) .get(); @@ -1310,8 +1289,7 @@ public void testEscapeHtml() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "test")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "test")) .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1, 10)) .get(); @@ -1331,8 +1309,7 @@ public void testEscapeHtmlVector() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "test")) + SearchResponse search = prepareSearch().setQuery(matchQuery("title", "test")) .highlighter(new HighlightBuilder().encoder("html").field("title", 30, 1, 10).highlighterType("plain")) 
.get(); @@ -1371,20 +1348,26 @@ public void testMultiMapperVectorWithStore() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) - .get(); - - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)), + 0, + "title", + 0, + 1, + equalTo("this is a test") + ); // search on title.key and highlight on title - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) - .get(); - - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)), + 0, + "title.key", + 0, + 1, + equalTo("this is a test") + ); } public void testMultiMapperVectorFromSource() throws Exception { @@ -1418,20 +1401,26 @@ public void testMultiMapperVectorFromSource() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) - .get(); - - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)), + 0, + "title", + 0, + 1, + equalTo("this is a test") + ); // search on title.key and highlight on title.key - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) - .get(); - - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)), + 0, + "title.key", + 0, + 1, + equalTo("this is a test") + ); } public void testMultiMapperNoVectorWithStore() throws Exception { @@ -1465,20 +1454,26 @@ public void testMultiMapperNoVectorWithStore() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) - .get(); - - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)), + 0, + "title", + 0, + 1, + equalTo("this is a test") + ); // search on title.key and highlight on title - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) - .get(); - - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); + assertHighlight( + 
prepareSearch().setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)), + 0, + "title.key", + 0, + 1, + equalTo("this is a test") + ); } public void testMultiMapperNoVectorFromSource() throws Exception { @@ -1511,20 +1506,26 @@ public void testMultiMapperNoVectorFromSource() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse search = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)) - .get(); - - assertHighlight(search, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title", 50, 1)), + 0, + "title", + 0, + 1, + equalTo("this is a test") + ); // search on title.key and highlight on title.key - search = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)) - .get(); - - assertHighlight(search, 0, "title.key", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title.key", "this is a test")) + .highlighter(new HighlightBuilder().encoder("html").field("title.key", 50, 1)), + 0, + "title.key", + 0, + 1, + equalTo("this is a test") + ); } public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exception { @@ -1540,14 +1541,12 @@ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exceptio indexRandom(true, indexRequestBuilders); assertNoFailures( - client().prepareSearch() - .setQuery(matchPhraseQuery("title", "this is a test")) + prepareSearch().setQuery(matchPhraseQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10)) ); assertFailures( - client().prepareSearch() - .setQuery(matchPhraseQuery("title", "this is a test")) + prepareSearch().setQuery(matchPhraseQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")), RestStatus.BAD_REQUEST, containsString( @@ -1557,8 +1556,7 @@ public void testFastVectorHighlighterShouldFailIfNoTermVectors() throws Exceptio // should not fail if there is a wildcard assertNoFailures( - client().prepareSearch() - .setQuery(matchPhraseQuery("title", "this is a test")) + prepareSearch().setQuery(matchPhraseQuery("title", "this is a test")) .highlighter(new HighlightBuilder().field("tit*", 50, 1, 10).highlighterType("fvh")) ); } @@ -1575,8 +1573,7 @@ public void testDisableFastVectorHighlighter() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse search = client().prepareSearch() - .setQuery(matchPhraseQuery("title", "test for the workaround")) + SearchResponse search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("fvh")) .get(); @@ -1586,8 +1583,7 @@ public void testDisableFastVectorHighlighter() throws Exception { } // Using plain highlighter instead of FVH - search = client().prepareSearch() - .setQuery(matchPhraseQuery("title", "test for the workaround")) + search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround")) .highlighter(new HighlightBuilder().field("title", 50, 1, 10).highlighterType("plain")) .get(); @@ -1603,8 +1599,7 
@@ public void testDisableFastVectorHighlighter() throws Exception { } // Using plain highlighter instead of FVH on the field level - search = client().prepareSearch() - .setQuery(matchPhraseQuery("title", "test for the workaround")) + search = prepareSearch().setQuery(matchPhraseQuery("title", "test for the workaround")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("title").highlighterType("plain")).highlighterType("plain") ) @@ -1636,8 +1631,7 @@ public void testFSHHighlightAllMvFragments() throws Exception { .get(); refresh(); - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("tags", "tag")) + SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("tags", "tag")) .highlighter(new HighlightBuilder().field("tags", -1, 0).highlighterType("fvh")) .get(); @@ -1662,10 +1656,14 @@ public void testBoostingQuery() { SearchSourceBuilder source = searchSource().query( boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f) ).highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field2", + 0, + 1, + equalTo("The quick brown fox jumps over the lazy dog") + ); } public void testBoostingQueryTermVector() throws IOException { @@ -1678,10 +1676,14 @@ public void testBoostingQueryTermVector() throws IOException { SearchSourceBuilder source = searchSource().query( boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f) ).highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field2", + 0, + 1, + equalTo("The quick brown fox jumps over the lazy dog") + ); } public void testPlainHighlightDifferentFragmenter() throws Exception { @@ -1701,8 +1703,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { .get(); refresh(); - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) + SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field( new HighlightBuilder.Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("simple") @@ -1720,8 +1721,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { equalTo("here is another one that is very long tag and has the tag token near the end") ); - response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) + response = prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field( new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("span") @@ -1740,8 +1740,7 @@ public void testPlainHighlightDifferentFragmenter() throws Exception { ); assertFailures( - client().prepareSearch("test") - .setQuery(QueryBuilders.matchPhraseQuery("tags", "long 
tag")) + prepareSearch("test").setQuery(QueryBuilders.matchPhraseQuery("tags", "long tag")) .highlighter( new HighlightBuilder().field( new Field("tags").highlighterType("plain").fragmentSize(-1).numOfFragments(2).fragmenter("invalid") @@ -1759,8 +1758,7 @@ public void testPlainHighlighterMultipleFields() { indexDoc("test", "1", "field1", "The quick brown fox", "field2", "The slow brown fox"); refresh(); - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field1", "fox")) + SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("field1").preTags("<1>").postTags("").requireFieldMatch(true)) .field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("").requireFieldMatch(false)) @@ -1784,8 +1782,7 @@ public void testFastVectorHighlighterMultipleFields() { indexDoc("test", "1", "field1", "The quick brown fox", "field2", "The slow brown fox"); refresh(); - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field1", "fox")) + SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox")) .highlighter( new HighlightBuilder().field(new HighlightBuilder.Field("field1").preTags("<1>").postTags("").requireFieldMatch(true)) .field(new HighlightBuilder.Field("field2").preTags("<2>").postTags("").requireFieldMatch(false)) @@ -1802,8 +1799,7 @@ public void testMissingStoredField() throws Exception { refresh(); // This query used to fail when the field to highlight was absent - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field", "highlight")) + SearchResponse response = prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "highlight")) .highlighter( new HighlightBuilder().field( new HighlightBuilder.Field("highlight_field").fragmentSize(-1).numOfFragments(1).fragmenter("simple") @@ -1844,8 +1840,7 @@ public void testNumericHighlighting() throws Exception { // Highlighting of numeric fields is not supported, but it should not raise errors // (this behavior is consistent with version 0.20) assertHitCount( - client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test")) + prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "test")) .highlighter( new HighlightBuilder().field("text") .field("byte") @@ -1872,9 +1867,7 @@ public void testResetTwice() throws Exception { // Mock tokenizer will throw an exception if it is resetted twice assertHitCount( - client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "test")) - .highlighter(new HighlightBuilder().field("text")), + prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "test")).highlighter(new HighlightBuilder().field("text")), 1L ); } @@ -1894,8 +1887,7 @@ public void testHighlightUsesHighlightQuery() throws IOException { for (String type : ALL_TYPES) { HighlightBuilder.Field field = new HighlightBuilder.Field("text"); HighlightBuilder highlightBuilder = new HighlightBuilder().field(field).highlighterType(type); - SearchRequestBuilder search = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("text", "testing")) + SearchRequestBuilder search = prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")) .highlighter(highlightBuilder); Matcher searchQueryMatcher = equalTo("Testing the highlight query feature"); @@ -1906,7 +1898,7 @@ public void 
testHighlightUsesHighlightQuery() throws IOException { Matcher hlQueryMatcher = equalTo("Testing the highlight query feature"); field.highlightQuery(matchQuery("text", "query")); highlightBuilder = new HighlightBuilder().field(field); - search = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")).highlighter(highlightBuilder); + search = prepareSearch("test").setQuery(QueryBuilders.matchQuery("text", "testing")).highlighter(highlightBuilder); response = search.get(); assertHighlight(response, 0, "text", 0, hlQueryMatcher); @@ -1940,99 +1932,120 @@ public void testHighlightNoMatchSize() throws IOException { // When you don't set noMatchSize you don't get any results if there isn't anything to highlight. HighlightBuilder.Field field = new HighlightBuilder.Field("text").fragmentSize(21).numOfFragments(1).highlighterType("plain"); - SearchResponse response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); // When noMatchSize is set to 0 you also shouldn't get any field.highlighterType("plain").noMatchSize(0); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); // When noMatchSize is between 0 and the size of the string field.highlighterType("plain").noMatchSize(21); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so") + ); // The FVH also works but the fragment is longer than the plain highlighter because of boundary_max_scan field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); 
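
The hunks above (and most of what follows) are instances of a single mechanical rewrite: the build-execute-assert idiom is collapsed into one call that hands the unexecuted SearchRequestBuilder to the assertion helper. A minimal before/after sketch, assuming prepareSearch(...) is an ESIntegTestCase convenience delegating to client().prepareSearch(...) and that the builder-accepting assertHighlight overload executes the request itself; the class and test names here are hypothetical:

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
    import org.elasticsearch.test.ESIntegTestCase;

    import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
    import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight;
    import static org.hamcrest.Matchers.equalTo;

    public class HighlightIdiomSketchIT extends ESIntegTestCase {
        public void testBothIdioms() {
            // Before: the caller executes the request and owns the response.
            SearchResponse response = client().prepareSearch()
                .setQuery(matchQuery("title", "this is a test"))
                .highlighter(new HighlightBuilder().field("title", 50, 1))
                .get();
            assertHighlight(response, 0, "title", 0, 1, equalTo("this is a test"));

            // After: the helper executes the builder and performs the same check,
            // keeping response handling inside the assertion utility.
            assertHighlight(
                prepareSearch().setQuery(matchQuery("title", "this is a test"))
                    .highlighter(new HighlightBuilder().field("title", 50, 1)),
                0, "title", 0, 1,
                equalTo("this is a test")
            );
        }
    }
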
// Unified hl also works but the fragment is longer than the plain highlighter because of the boundary is the word field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); // We can also ask for a fragment longer than the input string and get the whole string for (String type : new String[] { "plain", "unified" }) { field.highlighterType(type).noMatchSize(text.length() * 2).numOfFragments(0); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); } field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); // We can also ask for a fragment exactly the size of the input field and get the whole field field.highlighterType("plain").noMatchSize(text.length()); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); // unified hl returns the first sentence as the noMatchSize does not cross sentence boundary. 
field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo(text)); + assertHighlight(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text", 0, 1, equalTo(text)); // You can set noMatchSize globally in the highlighter as well field.highlighterType("plain").noMatchSize(null); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so") + ); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field).noMatchSize(21)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); // We don't break if noMatchSize is less than zero though field.highlighterType("plain").noMatchSize(randomIntBetween(Integer.MIN_VALUE, -1)); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); } public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { @@ -2054,16 +2067,34 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { .numOfFragments(1) .highlighterType("plain") .noMatchSize(21); - SearchResponse response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so") + ); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); 
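
The negative checks are converted the same way. A plausible shape for the builder-accepting assertNotHighlighted overload; the patch confirms only the call sites, so the body below is an inference:

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.action.search.SearchResponse;

    // Execute the builder, then delegate to the existing response-based
    // assertion; centralizing execution gives the helper one place to manage
    // the response after the check.
    public static void assertNotHighlighted(SearchRequestBuilder builder, int hit, String field) {
        SearchResponse response = builder.get();
        assertNotHighlighted(response, hit, field);
    }
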
field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am pretty long so some")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am pretty long so some") + ); // And noMatchSize returns nothing when the first entry is empty string! indexDoc("test", "2", "text", new String[] { "", text2 }); @@ -2071,33 +2102,34 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery().addIds("2"); field.highlighterType("plain"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); // except for the unified highlighter which starts from the first string with actual content field.highlighterType("unified"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("I am short")); + assertHighlight( + prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("I am short") + ); // But if the field was actually empty then you should get no highlighting field indexDoc("test", "3", "text", new String[] {}); refresh(); idsQueryBuilder = QueryBuilders.idsQuery().addIds("3"); field.highlighterType("plain"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); // Same for if the field doesn't even exist on the document indexDoc("test", "4"); @@ -2105,29 +2137,27 @@ public void testHighlightNoMatchSizeWithMultivaluedFields() throws IOException { idsQueryBuilder = QueryBuilders.idsQuery().addIds("4"); field.highlighterType("plain"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + 
assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "postings"); + assertNotHighlighted( + prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)), + 0, + "postings" + ); // Again same if the field isn't mapped field = new HighlightBuilder.Field("unmapped").highlighterType("plain").noMatchSize(21); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertNotHighlighted(response, 0, "text"); + assertNotHighlighted(prepareSearch("test").highlighter(new HighlightBuilder().field(field)), 0, "text"); } public void testHighlightNoMatchSizeNumberOfFragments() { @@ -2150,31 +2180,49 @@ public void testHighlightNoMatchSizeNumberOfFragments() { .numOfFragments(0) .highlighterType("plain") .noMatchSize(20); - SearchResponse response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("This is the first") + ); field.highlighterType("fvh"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("This is the first sentence") + ); field.highlighterType("unified"); - response = client().prepareSearch("test").highlighter(new HighlightBuilder().field(field)).get(); - assertHighlight(response, 0, "text", 0, 1, equalTo("This is the first sentence")); + assertHighlight( + prepareSearch("test").highlighter(new HighlightBuilder().field(field)), + 0, + "text", + 0, + 1, + equalTo("This is the first sentence") + ); // if there's a match we only return the values with matches (whole value as number_of_fragments == 0) MatchQueryBuilder queryBuilder = QueryBuilders.matchQuery("text", "third fifth"); field.highlighterType("plain"); - response = client().prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); + SearchResponse response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); 
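
The statement above also marks the limit of the rewrite: the next two assertions inspect two fragments of the same hit, so the search is executed once and the local SearchResponse is kept, presumably to avoid re-running the query per assertion. Only the client().prepareSearch prefix changes; condensed from the surrounding hunk:

    SearchResponse response = prepareSearch("test")
        .setQuery(queryBuilder)
        .highlighter(new HighlightBuilder().field(field))
        .get();
    // One execution, two fragment assertions against the same response.
    assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence."));
    assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence"));
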
assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence")); field.highlighterType("fvh"); - response = client().prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); + response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence")); field.highlighterType("unified"); - response = client().prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); + response = prepareSearch("test").setQuery(queryBuilder).highlighter(new HighlightBuilder().field(field)).get(); assertHighlight(response, 0, "text", 0, 2, equalTo("This is the third sentence. This is the fourth sentence.")); assertHighlight(response, 0, "text", 1, 2, equalTo("This is the fifth sentence")); } @@ -2191,26 +2239,18 @@ public void testPostingsHighlighter() throws Exception { logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(termQuery("field1", "test")) .highlighter(highlight().field("field1").preTags("").postTags("")); - SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(client().search(new SearchRequest("test").source(source)), 0, "field1", 0, 1, equalTo("this is a test")); logger.info("--> searching on field1, highlighting on field1"); source = searchSource().query(termQuery("field1", "test")) .highlighter(highlight().field("field1").preTags("").postTags("")); - - searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("this is a test")); + assertHighlight(client().search(new SearchRequest("test").source(source)), 0, "field1", 0, 1, equalTo("this is a test")); logger.info("--> searching on field2, highlighting on field2"); source = searchSource().query(termQuery("field2", "quick")) .highlighter(highlight().field("field2").order("score").preTags("").postTags("")); - - searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - assertHighlight( - searchResponse, + client().search(new SearchRequest("test").source(source)), 0, "field2", 0, @@ -2221,20 +2261,21 @@ public void testPostingsHighlighter() throws Exception { logger.info("--> searching on field2, highlighting on field2"); source = searchSource().query(matchPhraseQuery("field2", "quick brown")) .highlighter(highlight().field("field2").preTags("").postTags("")); - - searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy quick dog")); + assertHighlight( + client().search(new SearchRequest("test").source(source)), + 0, + "field2", + 0, + 1, + equalTo("The quick brown fox jumps over the lazy quick dog") + ); // lets fall back to the standard highlighter then, what people would do to highlight query matches logger.info("--> searching on field2, highlighting on field2, falling back to the plain highlighter"); source = 
searchSource().query(matchPhraseQuery("field2", "quick brown")) .highlighter(highlight().field("field2").preTags("").postTags("").highlighterType("plain").requireFieldMatch(false)); - - searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - assertHighlight( - searchResponse, + client().search(new SearchRequest("test").source(source)), 0, "field2", 0, @@ -2256,12 +2297,15 @@ public void testPostingsHighlighterMultipleFields() throws Exception { "The slow brown fox. Second sentence." ); refresh(); - - SearchResponse response = client().prepareSearch("test") - .setQuery(QueryBuilders.matchQuery("field1", "fox")) - .highlighter(new HighlightBuilder().field(new Field("field1").preTags("<1>").postTags("").requireFieldMatch(true))) - .get(); - assertHighlight(response, 0, "field1", 0, 1, equalTo("The quick brown <1>fox. Second sentence.")); + assertHighlight( + prepareSearch("test").setQuery(QueryBuilders.matchQuery("field1", "fox")) + .highlighter(new HighlightBuilder().field(new Field("field1").preTags("<1>").postTags("").requireFieldMatch(true))), + 0, + "field1", + 0, + 1, + equalTo("The quick brown <1>fox. Second sentence.") + ); } public void testPostingsHighlighterNumberOfFragments() throws Exception { @@ -2506,8 +2550,7 @@ public void testPostingsHighlighterEscapeHtml() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("title", "test")) + SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("title", "test")) .highlighter(new HighlightBuilder().field("title").encoder("html")) .get(); @@ -2553,7 +2596,7 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse searchResponse = client().prepareSearch() + SearchResponse searchResponse = prepareSearch() // lets make sure we analyze the query and we highlight the resulting terms .setQuery(matchQuery("title", "This is a Test")) .highlighter(new HighlightBuilder().field("title")) @@ -2565,8 +2608,7 @@ public void testPostingsHighlighterMultiMapperWithStore() throws Exception { assertHighlight(hit, "title", 0, 1, equalTo("this is a test . 
Second sentence.")); // search on title.key and highlight on title - searchResponse = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) + searchResponse = prepareSearch().setQuery(matchQuery("title.key", "this is a test")) .highlighter(new HighlightBuilder().field("title.key")) .get(); assertHitCount(searchResponse, 1L); @@ -2613,20 +2655,24 @@ public void testPostingsHighlighterMultiMapperFromSource() throws Exception { refresh(); // simple search on body with standard analyzer with a simple field query - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("title", "this is a test")) - .highlighter(new HighlightBuilder().field("title")) - .get(); - - assertHighlight(searchResponse, 0, "title", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title", "this is a test")).highlighter(new HighlightBuilder().field("title")), + 0, + "title", + 0, + 1, + equalTo("this is a test") + ); // search on title.key and highlight on title.key - searchResponse = client().prepareSearch() - .setQuery(matchQuery("title.key", "this is a test")) - .highlighter(new HighlightBuilder().field("title.key")) - .get(); - - assertHighlight(searchResponse, 0, "title.key", 0, 1, equalTo("this is a test")); + assertHighlight( + prepareSearch().setQuery(matchQuery("title.key", "this is a test")).highlighter(new HighlightBuilder().field("title.key")), + 0, + "title.key", + 0, + 1, + equalTo("this is a test") + ); } public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { @@ -2656,11 +2702,11 @@ public void testPostingsHighlighterShouldFailIfNoOffsets() throws Exception { indexRandom(true, indexRequestBuilders); assertNoFailures( - client().prepareSearch().setQuery(matchQuery("title", "this is a test")).highlighter(new HighlightBuilder().field("title")) + prepareSearch().setQuery(matchQuery("title", "this is a test")).highlighter(new HighlightBuilder().field("title")) ); } - public void testPostingsHighlighterBoostingQuery() throws IOException { + public void testPostingsHighlighterBoostingQuery() throws IOException, ExecutionException, InterruptedException { assertAcked(prepareCreate("test").setMapping(type1PostingsffsetsMapping())); ensureGreen(); client().prepareIndex("test") @@ -2672,9 +2718,14 @@ public void testPostingsHighlighterBoostingQuery() throws IOException { SearchSourceBuilder source = searchSource().query( boostingQuery(termQuery("field2", "brown"), termQuery("field2", "foobar")).negativeBoost(0.5f) ).highlighter(highlight().field("field2").preTags("").postTags("")); - SearchResponse searchResponse = client().search(new SearchRequest("test").source(source)).actionGet(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("The quick brown fox jumps over the lazy dog! Second sentence.")); + assertHighlight( + client().search(new SearchRequest("test").source(source)), + 0, + "field2", + 0, + 1, + equalTo("The quick brown fox jumps over the lazy dog! 
Second sentence.") + ); } private static XContentBuilder type1PostingsffsetsMapping() throws IOException { @@ -2706,9 +2757,8 @@ public void testPostingsHighlighterPrefixQuery() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(prefixQuery("field2", "qui")).highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field2", 0, @@ -2728,10 +2778,8 @@ public void testPostingsHighlighterFuzzyQuery() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(fuzzyQuery("field2", "quck")).highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field2", 0, @@ -2751,10 +2799,8 @@ public void testPostingsHighlighterRegexpQuery() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(regexpQuery("field2", "qu[a-l]+k")).highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field2", 0, @@ -2774,10 +2820,8 @@ public void testPostingsHighlighterWildcardQuery() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(wildcardQuery("field2", "qui*")).highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field2", 0, @@ -2786,7 +2830,7 @@ public void testPostingsHighlighterWildcardQuery() throws Exception { ); source = searchSource().query(wildcardQuery("field2", "qu*k")).highlighter(highlight().field("field2")); - searchResponse = client().prepareSearch("test").setSource(source).get(); + SearchResponse searchResponse = prepareSearch("test").setSource(source).get(); assertHitCount(searchResponse, 1L); assertHighlight( @@ -2809,9 +2853,7 @@ public void testPostingsHighlighterTermRangeQuery() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(rangeQuery("field2").gte("aaaa").lt("zzzz")) .highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - - assertHighlight(searchResponse, 0, "field2", 0, 1, equalTo("aaab")); + assertHighlight(prepareSearch("test").setSource(source), 0, "field2", 0, 1, equalTo("aaab")); } public void testPostingsHighlighterQueryString() throws Exception { @@ -2826,9 +2868,8 @@ public void testPostingsHighlighterQueryString() throws Exception { logger.info("--> highlighting and searching on field2"); SearchSourceBuilder source = searchSource().query(queryStringQuery("qui*").defaultField("field2")) .highlighter(highlight().field("field2")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); assertHighlight( - searchResponse, + prepareSearch("test").setSource(source), 0, "field2", 0, @@ -2847,8 +2888,14 @@ public void testPostingsHighlighterRegexpQueryWithinConstantScoreQuery() throws 
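
Tests that go through client().search(new SearchRequest(...)) rather than a request builder now pass the in-flight future straight to the helper, and resolving that future is what adds ExecutionException and InterruptedException to the testPostingsHighlighterBoostingQuery signature above. A sketch of the assumed overload:

    import java.util.concurrent.ExecutionException;

    import org.elasticsearch.action.ActionFuture;
    import org.elasticsearch.action.search.SearchResponse;
    import org.hamcrest.Matcher;

    // Assumed shape: resolve the future, then reuse the response-based check.
    public static void assertHighlight(
        ActionFuture<SearchResponse> responseFuture,
        int hit, String field, int fragment, int totalFragments,
        Matcher<String> matcher
    ) throws ExecutionException, InterruptedException {
        assertHighlight(responseFuture.get(), hit, field, fragment, totalFragments, matcher);
    }
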
logger.info("--> highlighting and searching on field1"); SearchSourceBuilder source = searchSource().query(constantScoreQuery(regexpQuery("field1", "pho[a-z]+"))) .highlighter(highlight().field("field1")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field1", + 0, + 1, + equalTo("The photography word will get highlighted") + ); } public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Exception { @@ -2864,8 +2911,14 @@ public void testPostingsHighlighterMultiTermQueryMultipleLevels() throws Excepti .should(matchQuery("field1", "test")) .should(constantScoreQuery(queryStringQuery("field1:photo*"))) ).highlighter(highlight().field("field1")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field1", + 0, + 1, + equalTo("The photography word will get highlighted") + ); } public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Exception { @@ -2879,8 +2932,14 @@ public void testPostingsHighlighterPrefixQueryWithinBooleanQuery() throws Except SearchSourceBuilder source = searchSource().query( boolQuery().must(prefixQuery("field1", "photo")).should(matchQuery("field1", "test").minimumShouldMatch("0")) ).highlighter(highlight().field("field1")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field1", + 0, + 1, + equalTo("The photography word will get highlighted") + ); } public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Exception { @@ -2894,8 +2953,14 @@ public void testPostingsHighlighterQueryStringWithinFilteredQuery() throws Excep SearchSourceBuilder source = searchSource().query( boolQuery().must(queryStringQuery("field1:photo*")).mustNot(existsQuery("field_null")) ).highlighter(highlight().field("field1")); - SearchResponse searchResponse = client().prepareSearch("test").setSource(source).get(); - assertHighlight(searchResponse, 0, "field1", 0, 1, equalTo("The photography word will get highlighted")); + assertHighlight( + prepareSearch("test").setSource(source), + 0, + "field1", + 0, + 1, + equalTo("The photography word will get highlighted") + ); } public void testPostingsHighlighterManyDocs() throws Exception { @@ -2919,8 +2984,7 @@ public void testPostingsHighlighterManyDocs() throws Exception { indexRandom(true, indexRequestBuilders); logger.info("--> searching explicitly on field1 and highlighting on it"); - SearchRequestBuilder searchRequestBuilder = client().prepareSearch() - .setSize(COUNT) + SearchRequestBuilder searchRequestBuilder = prepareSearch().setSize(COUNT) .setQuery(termQuery("field1", "test")) .highlighter(new HighlightBuilder().field("field1")); SearchResponse searchResponse = searchRequestBuilder.get(); @@ -2951,11 +3015,15 @@ public void testDoesNotHighlightTypeName() throws Exception { indexRandom(true, client().prepareIndex("test").setSource("foo", "test typename")); for (String highlighter : ALL_TYPES) { - SearchResponse response = 
client().prepareSearch("test") - .setQuery(matchQuery("foo", "test")) - .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)) - .get(); - assertHighlight(response, 0, "foo", 0, 1, equalTo("test typename")); + assertHighlight( + prepareSearch("test").setQuery(matchQuery("foo", "test")) + .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)), + 0, + "foo", + 0, + 1, + equalTo("test typename") + ); } } @@ -2979,11 +3047,15 @@ public void testDoesNotHighlightAliasFilters() throws Exception { indexRandom(true, client().prepareIndex("test").setSource("foo", "test japanese")); for (String highlighter : ALL_TYPES) { - SearchResponse response = client().prepareSearch("filtered_alias") - .setQuery(matchQuery("foo", "test")) - .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)) - .get(); - assertHighlight(response, 0, "foo", 0, 1, equalTo("test japanese")); + assertHighlight( + prepareSearch("filtered_alias").setQuery(matchQuery("foo", "test")) + .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false)), + 0, + "foo", + 0, + 1, + equalTo("test japanese") + ); } } @@ -3060,22 +3132,25 @@ private
<P extends AbstractQueryBuilder<P
    > void phraseBoostTestCaseForClauses( ) { Matcher highlightedMatcher = Matchers.either(containsString("highlight words together")) .or(containsString("highlight words together")); - SearchRequestBuilder search = client().prepareSearch("test") - .highlighter( - new HighlightBuilder().field("field1", 100, 1).order("score").highlighterType(highlighterType).requireFieldMatch(true) - ); + SearchRequestBuilder search = prepareSearch("test").highlighter( + new HighlightBuilder().field("field1", 100, 1).order("score").highlighterType(highlighterType).requireFieldMatch(true) + ); // Try with a bool query phrase.boost(boost); - SearchResponse response = search.setQuery(boolQuery().must(terms).should(phrase)).get(); - assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); + assertHighlight(search.setQuery(boolQuery().must(terms).should(phrase)), 0, "field1", 0, 1, highlightedMatcher); phrase.boost(1); // Try with a boosting query - response = search.setQuery(boostingQuery(phrase, terms).boost(boost).negativeBoost(1)).get(); - assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); + assertHighlight(search.setQuery(boostingQuery(phrase, terms).boost(boost).negativeBoost(1)), 0, "field1", 0, 1, highlightedMatcher); // Try with a boosting query using a negative boost - response = search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1 / boost)).get(); - assertHighlight(response, 0, "field1", 0, 1, highlightedMatcher); + assertHighlight( + search.setQuery(boostingQuery(phrase, terms).boost(1).negativeBoost(1 / boost)), + 0, + "field1", + 0, + 1, + highlightedMatcher + ); } public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOException { @@ -3111,11 +3186,9 @@ public void testGeoFieldHighlightingWithDifferentHighlighters() throws IOExcepti .setCorners(61.10078883158897, -170.15625, -64.92354174306496, 118.47656249999999) ) .should(QueryBuilders.termQuery("text", "failure")); - SearchResponse search = client().prepareSearch() - .setSource( - new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) - ) - .get(); + SearchResponse search = prepareSearch().setSource( + new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) + ).get(); assertNoFailures(search); assertThat(search.getHits().getTotalHits().value, equalTo(1L)); assertThat(search.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); @@ -3158,11 +3231,12 @@ public void testGeoFieldHighlightingWhenQueryGetsRewritten() throws IOException .setCorners(new GeoPoint(48.934059, 41.610741), new GeoPoint(-23.065941, 113.610741)) ) ); - SearchResponse search = client().prepareSearch() - .setSource(new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().highlighterType("plain").field("jd"))) - .get(); - assertNoFailures(search); - assertThat(search.getHits().getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + prepareSearch().setSource( + new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().highlighterType("plain").field("jd")) + ), + 1 + ); } public void testKeywordFieldHighlighting() throws IOException { @@ -3184,12 +3258,10 @@ public void testKeywordFieldHighlighting() throws IOException { .setSource(jsonBuilder().startObject().field("keyword_field", "some text").endObject()) .get(); refresh(); - SearchResponse search = client().prepareSearch() - .setSource( - new 
SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text")) - .highlighter(new HighlightBuilder().field("*")) - ) - .get(); + SearchResponse search = prepareSearch().setSource( + new SearchSourceBuilder().query(QueryBuilders.matchQuery("keyword_field", "some text")) + .highlighter(new HighlightBuilder().field("*")) + ).get(); assertNoFailures(search); assertThat(search.getHits().getTotalHits().value, equalTo(1L)); assertThat( @@ -3216,8 +3288,7 @@ public void testCopyToFields() throws Exception { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch() - .setQuery(matchQuery("foo_copy", "brown")) + SearchResponse response = prepareSearch().setQuery(matchQuery("foo_copy", "brown")) .highlighter(new HighlightBuilder().field(new Field("foo_copy"))) .get(); @@ -3267,8 +3338,7 @@ public void testACopyFieldWithNestedQuery() throws Exception { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) + SearchResponse searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("foo_text").highlighterType("fvh")).requireFieldMatch(false)) .get(); assertHitCount(searchResponse, 1); @@ -3285,8 +3355,7 @@ public void testFunctionScoreQueryHighlight() throws Exception { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) + SearchResponse searchResponse = prepareSearch().setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"))) .highlighter(new HighlightBuilder().field(new Field("text"))) .get(); assertHitCount(searchResponse, 1); @@ -3306,15 +3375,12 @@ public void testFiltersFunctionScoreQueryHighlight() throws Exception { new RandomScoreFunctionBuilder() ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery( - new FunctionScoreQueryBuilder( - QueryBuilders.prefixQuery("text", "bro"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { filterBuilder } - ) + SearchResponse searchResponse = prepareSearch().setQuery( + new FunctionScoreQueryBuilder( + QueryBuilders.prefixQuery("text", "bro"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { filterBuilder } ) - .highlighter(new HighlightBuilder().field(new Field("text"))) - .get(); + ).highlighter(new HighlightBuilder().field(new Field("text"))).get(); assertHitCount(searchResponse, 1); HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("text"); assertThat(field.getFragments().length, equalTo(1)); @@ -3326,7 +3392,6 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { indicesAdmin().prepareCreate("index-1") .setMapping("d", "type=date", "field", "type=text,store=true,term_vector=with_positions_offsets") .setSettings(indexSettings(2, 0)) - .get() ); ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time"); @@ -3338,8 +3403,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { ); ensureSearchable("index-1"); for (int i = 0; i < 5; i++) { - final SearchResponse r1 = client().prepareSearch("index-1") - .addSort("d", SortOrder.DESC) + final SearchResponse r1 = 
prepareSearch("index-1").addSort("d", SortOrder.DESC) .setTrackScores(true) .highlighter(highlight().field("field").preTags("").postTags("")) .setQuery( @@ -3349,7 +3413,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { ) .get(); - assertSearchResponse(r1); + assertNoFailures(r1); assertThat(r1.getHits().getTotalHits().value, equalTo(1L)); assertHighlight(r1, 0, "field", 0, 1, equalTo("hello world")); } @@ -3396,18 +3460,16 @@ public void testWithNestedQuery() throws Exception { .get(); for (String type : new String[] { "unified", "plain" }) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None)) - .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + nestedQuery("foo", matchQuery("foo.text", "brown cow"), ScoreMode.None) + ).highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))).get(); assertHitCount(searchResponse, 1); HighlightField field = searchResponse.getHits().getAt(0).getHighlightFields().get("foo.text"); assertThat(field.getFragments().length, equalTo(2)); assertThat(field.getFragments()[0].string(), equalTo("brown shoes")); assertThat(field.getFragments()[1].string(), equalTo("cow")); - searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) + searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) .get(); assertHitCount(searchResponse, 1); @@ -3415,8 +3477,7 @@ public void testWithNestedQuery() throws Exception { assertThat(field.getFragments().length, equalTo(1)); assertThat(field.getFragments()[0].string(), equalTo("brown shoes")); - searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None)) + searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhraseQuery("foo.text", "brown shoes"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) .get(); assertHitCount(searchResponse, 1); @@ -3424,8 +3485,7 @@ public void testWithNestedQuery() throws Exception { assertThat(field.getFragments().length, equalTo(1)); assertThat(field.getFragments()[0].string(), equalTo("brown shoes")); - searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) + searchResponse = prepareSearch().setQuery(nestedQuery("foo", matchPhrasePrefixQuery("foo.text", "bro"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("foo.text").highlighterType(type))) .get(); assertHitCount(searchResponse, 1); @@ -3438,8 +3498,7 @@ public void testWithNestedQuery() throws Exception { // but we highlight the root text field since nested documents cannot be highlighted with postings nor term vectors // directly. 
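
Two smaller helper changes ride along in the hunks above: assertSearchResponse(r1) gives way to assertNoFailures(r1) (matching the import swap in SearchFieldsIT further down), and the geo rewrite test folds its no-failures and total-hits checks into a single assertHitCountAndNoFailures call. An inferred composition of the two existing checks; only the call site is confirmed by the patch:

    import org.elasticsearch.action.search.SearchRequestBuilder;
    import org.elasticsearch.action.search.SearchResponse;

    // Run the search once, then apply both existing assertions to the result.
    public static void assertHitCountAndNoFailures(SearchRequestBuilder builder, long expectedHitCount) {
        SearchResponse response = builder.get();
        assertNoFailures(response);
        assertHitCount(response, expectedHitCount);
    }
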
for (String type : ALL_TYPES) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) + SearchResponse searchResponse = prepareSearch().setQuery(nestedQuery("foo", prefixQuery("foo.text", "bro"), ScoreMode.None)) .highlighter(new HighlightBuilder().field(new Field("text").highlighterType(type).requireFieldMatch(false))) .get(); assertHitCount(searchResponse, 1); @@ -3462,8 +3521,7 @@ public void testWithNormalizer() throws Exception { .get(); for (String highlighterType : new String[] { "unified", "plain" }) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("keyword", "hello world")) + SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("keyword", "hello world")) .highlighter(new HighlightBuilder().field(new Field("keyword").highlighterType(highlighterType))) .get(); assertHitCount(searchResponse, 1); @@ -3484,10 +3542,9 @@ public void testDisableHighlightIdField() throws Exception { .get(); for (String highlighterType : new String[] { "plain", "unified" }) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("_id", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1")) - .highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighterType).requireFieldMatch(false))) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + matchQuery("_id", "d33f85bf1e51e84d9ab38948db9f3a068e1fe5294f1d8603914ac8c7bcc39ca1") + ).highlighter(new HighlightBuilder().field(new Field("*").highlighterType(highlighterType).requireFieldMatch(false))).get(); assertHitCount(searchResponse, 1); assertNull(searchResponse.getHits().getAt(0).getHighlightFields().get("_id")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java index a40938094d2b1..474d4ebc12843 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fieldcaps/FieldCapabilitiesIT.java @@ -58,7 +58,6 @@ import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -469,17 +468,14 @@ public void testTargetNodeFails() throws Exception { try { final AtomicBoolean failedRequest = new AtomicBoolean(); for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.addRequestHandlingBehavior( - TransportFieldCapabilitiesAction.ACTION_NODE_NAME, - (handler, request, channel, task) -> { + MockTransportService.getInstance(node) + .addRequestHandlingBehavior(TransportFieldCapabilitiesAction.ACTION_NODE_NAME, (handler, request, channel, task) -> { if (failedRequest.compareAndSet(false, true)) { channel.sendResponse(new CircuitBreakingException("Simulated", CircuitBreaker.Durability.TRANSIENT)); } else { handler.messageReceived(request, channel, task); } - } - ); + }); } FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); 
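
In the FieldCapabilitiesIT hunks above, the repeated lookup-and-cast of the node's TransportService is replaced by MockTransportService.getInstance(node), which also lets the TransportService import go. Presumably the accessor just centralizes that cast, along these lines:

    import org.elasticsearch.test.ESIntegTestCase;
    import org.elasticsearch.transport.TransportService;

    // Assumed implementation: the same cast the call sites used to spell out,
    // now defined once on MockTransportService.
    public static MockTransportService getInstance(String nodeName) {
        return (MockTransportService) ESIntegTestCase.internalCluster().getInstance(TransportService.class, nodeName);
    }
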
request.indices("log-index-*"); @@ -495,8 +491,7 @@ public void testTargetNodeFails() throws Exception { assertThat(response.getField("field1"), hasKey("keyword")); } finally { for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.clearAllRules(); + MockTransportService.getInstance(node).clearAllRules(); } } } @@ -571,16 +566,13 @@ public void testRelocation() throws Exception { try { final AtomicBoolean relocated = new AtomicBoolean(); for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.addRequestHandlingBehavior( - TransportFieldCapabilitiesAction.ACTION_NODE_NAME, - (handler, request, channel, task) -> { + MockTransportService.getInstance(node) + .addRequestHandlingBehavior(TransportFieldCapabilitiesAction.ACTION_NODE_NAME, (handler, request, channel, task) -> { if (relocated.compareAndSet(false, true)) { moveOrCloseShardsOnNodes(node); } handler.messageReceived(request, channel, task); - } - ); + }); } FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); request.indices("log-index-*"); @@ -595,8 +587,7 @@ public void testRelocation() throws Exception { assertThat(response.getField("field1"), hasKey("long")); } finally { for (String node : internalCluster().getNodeNames()) { - MockTransportService transportService = (MockTransportService) internalCluster().getInstance(TransportService.class, node); - transportService.clearAllRules(); + MockTransportService.getInstance(node).clearAllRules(); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index aaac1d8964877..e3c9558eba907 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -63,7 +63,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -190,33 +189,32 @@ public void testStoredFields() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("field1").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); // field2 is not stored, check that it is not extracted from source. 
- searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field2"), nullValue()); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("field3").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addStoredField("*3") .addStoredField("field1") .addStoredField("field2") @@ -227,20 +225,20 @@ public void testStoredFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("*").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); 
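
Beyond the prepareSearch cleanup, the testStoredFields assertions around this point document the wildcard semantics of addStoredField: patterns expand only over stored fields, and _source is returned only when requested explicitly. A condensed excerpt of those checks:

    import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
    import static org.hamcrest.Matchers.equalTo;
    import static org.hamcrest.Matchers.notNullValue;
    import static org.hamcrest.Matchers.nullValue;

    // field2 is mapped but not stored, so requesting it returns nothing.
    SearchResponse resp = prepareSearch().setQuery(matchAllQuery()).addStoredField("field2").get();
    assertThat(resp.getHits().getAt(0).getFields().size(), equalTo(0));

    // Patterns expand only over stored fields: field1 and field3, not field2.
    resp = prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get();
    assertThat(resp.getHits().getAt(0).getFields().size(), equalTo(2));

    // "*" returns every stored field but leaves _source out...
    resp = prepareSearch().setQuery(matchAllQuery()).addStoredField("*").get();
    assertThat(resp.getHits().getAt(0).getSourceAsMap(), nullValue());

    // ...unless "_source" is added explicitly.
    resp = prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").get();
    assertThat(resp.getHits().getAt(0).getSourceAsMap(), notNullValue());
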
assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), nullValue()); @@ -248,7 +246,7 @@ public void testStoredFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); assertThat(searchResponse.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getSourceAsMap(), notNullValue()); @@ -299,8 +297,7 @@ public void testScriptDocAndFields() throws Exception { indicesAdmin().refresh(new RefreshRequest()).actionGet(); logger.info("running doc['num1'].value"); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .addScriptField( @@ -337,8 +334,7 @@ public void testScriptDocAndFields() throws Exception { assertThat(response.getHits().getAt(2).getFields().get("date1").getValues().get(0), equalTo(120000L)); logger.info("running doc['num1'].value * factor"); - response = client().prepareSearch() - .setQuery(matchAllQuery()) + response = prepareSearch().setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .addScriptField( "sNum1", @@ -388,8 +384,7 @@ public void testScriptFieldWithNanos() throws Exception { client().prepareIndex("test").setId("2").setSource(jsonBuilder().startObject().field("date", date).endObject()) ); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .addSort("date", SortOrder.ASC) .addScriptField( "date1", @@ -429,8 +424,7 @@ public void testIdBasedScriptFields() throws Exception { } indexRandom(true, indexRequestBuilders); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .addSort("num1", SortOrder.ASC) .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())) @@ -473,8 +467,7 @@ public void testScriptFieldUsingSource() throws Exception { .get(); indicesAdmin().refresh(new RefreshRequest()).actionGet(); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .addScriptField("s_obj1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_source.obj1", Collections.emptyMap())) .addScriptField( "s_obj1_test", @@ -514,8 +507,7 @@ public void testScriptFieldUsingSource() throws Exception { public void testScriptFieldsForNullReturn() throws Exception { client().prepareIndex("test").setId("1").setSource("foo", "bar").setRefreshPolicy("true").get(); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .addScriptField("test_script_1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "return null", 
Collections.emptyMap())) .get(); @@ -632,8 +624,7 @@ public void testStoredFieldsWithoutSource() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addStoredField("byte_field") .addStoredField("short_field") .addStoredField("integer_field") @@ -686,7 +677,7 @@ public void testSearchFieldsMetadata() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField("field1").addStoredField("_routing").get(); + SearchResponse searchResponse = prepareSearch("my-index").addStoredField("field1").addStoredField("_routing").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue()); @@ -754,7 +745,7 @@ public void testGetFieldsComplexField() throws Exception { String field = "field1.field2.field3.field4"; - SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField(field).get(); + SearchResponse searchResponse = prepareSearch("my-index").addStoredField(field).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(searchResponse.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); @@ -766,9 +757,9 @@ public void testSingleValueFieldDatatField() throws ExecutionException, Interrup assertAcked(indicesAdmin().prepareCreate("test").setMapping("test_field", "type=keyword").get()); indexRandom(true, client().prepareIndex("test").setId("1").setSource("test_field", "foobar")); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field")) - .get(); + SearchResponse searchResponse = prepareSearch("test").setSource( + new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field") + ).get(); assertHitCount(searchResponse, 1); Map fields = searchResponse.getHits().getHits()[0].getFields(); assertThat(fields.get("test_field").getValue(), equalTo("foobar")); @@ -853,8 +844,7 @@ public void testDocValueFields() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchRequestBuilder builder = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchRequestBuilder builder = prepareSearch().setQuery(matchAllQuery()) .addDocValueField("text_field") .addDocValueField("keyword_field") .addDocValueField("byte_field") @@ -911,7 +901,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValues(), equalTo(List.of("KmQ="))); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); - builder = client().prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"); + builder = prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"); searchResponse = builder.get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -953,8 +943,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("binary_field").getValues(), equalTo(List.of("KmQ="))); assertThat(searchResponse.getHits().getAt(0).getFields().get("ip_field").getValues(), 
equalTo(List.of("::1"))); - builder = client().prepareSearch() - .setQuery(matchAllQuery()) + builder = prepareSearch().setQuery(matchAllQuery()) .addDocValueField("byte_field", "#.0") .addDocValueField("short_field", "#.0") .addDocValueField("integer_field", "#.0") @@ -999,7 +988,7 @@ public void testScriptFields() throws Exception { "type=long", "md", "type=double" - ).get() + ) ); final int numDocs = randomIntBetween(3, 8); List reqs = new ArrayList<>(); @@ -1025,7 +1014,7 @@ public void testScriptFields() throws Exception { } indexRandom(true, reqs); ensureSearchable(); - SearchRequestBuilder req = client().prepareSearch("index"); + SearchRequestBuilder req = prepareSearch("index"); for (String field : Arrays.asList("s", "ms", "l", "ml", "d", "md")) { req.addScriptField( field, @@ -1033,7 +1022,7 @@ public void testScriptFields() throws Exception { ); } SearchResponse resp = req.get(); - assertSearchResponse(resp); + assertNoFailures(resp); for (SearchHit hit : resp.getHits().getHits()) { final int id = Integer.parseInt(hit.getId()); Map fields = hit.getFields(); @@ -1082,8 +1071,7 @@ public void testDocValueFieldsWithFieldAlias() throws Exception { indexDoc("test", "1", "text_field", "foo", "date_field", formatter.format(date)); refresh("test"); - SearchRequestBuilder builder = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchRequestBuilder builder = prepareSearch().setQuery(matchAllQuery()) .addDocValueField("text_field_alias") .addDocValueField("date_field_alias") .addDocValueField("date_field"); @@ -1145,10 +1133,7 @@ public void testWildcardDocValueFieldsWithFieldAlias() throws Exception { indexDoc("test", "1", "text_field", "foo", "date_field", formatter.format(date)); refresh("test"); - SearchRequestBuilder builder = client().prepareSearch() - .setQuery(matchAllQuery()) - .addDocValueField("*alias") - .addDocValueField("date_field"); + SearchRequestBuilder builder = prepareSearch().setQuery(matchAllQuery()).addDocValueField("*alias").addDocValueField("date_field"); SearchResponse searchResponse = builder.get(); assertNoFailures(searchResponse); @@ -1200,8 +1185,7 @@ public void testStoredFieldsWithFieldAlias() throws Exception { indexDoc("test", "1", "field1", "value1", "field2", "value2"); refresh("test"); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addStoredField("field1-alias") .addStoredField("field2-alias") .get(); @@ -1244,7 +1228,7 @@ public void testWildcardStoredFieldsWithFieldAlias() throws Exception { indexDoc("test", "1", "field1", "value1", "field2", "value2"); refresh("test"); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).addStoredField("field*").get(); assertHitCount(searchResponse, 1L); SearchHit hit = searchResponse.getHits().getAt(0); @@ -1270,8 +1254,8 @@ public void testLoadMetadata() throws Exception { .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) ); - SearchResponse response = client().prepareSearch("test").addStoredField("field1").get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").addStoredField("field1").get(); + assertNoFailures(response); assertHitCount(response, 1); Map fields = response.getHits().getAt(0).getMetadataFields(); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 08697bc1470fb..e9ce09f7455a2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.io.IOException; import java.util.ArrayList; @@ -47,6 +46,7 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -137,7 +137,7 @@ public void testExplainScript() throws InterruptedException, IOException { ) ).actionGet(); - ElasticsearchAssertions.assertNoFailures(response); + assertNoFailures(response); SearchHits hits = response.getHits(); assertThat(hits.getTotalHits().value, equalTo(20L)); int idCounter = 19; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java index 7d006c2fa754c..61cccfdf114b1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreFieldValueIT.java @@ -43,7 +43,7 @@ public void testFieldValueFactor() throws IOException { .endObject() .endObject() .endObject() - ).get() + ) ); client().prepareIndex("test").setId("1").setSource("test", 5, "body", "foo").get(); @@ -54,8 +54,7 @@ public void testFieldValueFactor() throws IOException { // document 2 scores higher because 17 > 5 assertOrderedSearchHits( - client().prepareSearch("test") - .setExplain(randomBoolean()) + prepareSearch("test").setExplain(randomBoolean()) .setQuery(functionScoreQuery(simpleQueryStringQuery("foo"), fieldValueFactorFunction("test"))), "2", "1" @@ -63,8 +62,7 @@ public void testFieldValueFactor() throws IOException { // try again, but this time explicitly use the do-nothing modifier assertOrderedSearchHits( - client().prepareSearch("test") - .setExplain(randomBoolean()) + prepareSearch("test").setExplain(randomBoolean()) .setQuery( functionScoreQuery( simpleQueryStringQuery("foo"), @@ -77,8 +75,7 @@ public void testFieldValueFactor() throws IOException { // document 1 scores higher because 1/5 > 1/17 assertOrderedSearchHits( - client().prepareSearch("test") - .setExplain(randomBoolean()) + prepareSearch("test").setExplain(randomBoolean()) .setQuery( functionScoreQuery( simpleQueryStringQuery("foo"), @@ -91,8 +88,7 @@ public void testFieldValueFactor() throws IOException { // doc 3 doesn't have a "test" field, so an exception will be thrown try { - SearchResponse response = client().prepareSearch("test") - 
.setExplain(randomBoolean()) + SearchResponse response = prepareSearch("test").setExplain(randomBoolean()) .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("test"))) .get(); assertFailures(response); @@ -102,8 +98,7 @@ public void testFieldValueFactor() throws IOException { // doc 3 doesn't have a "test" field but we're defaulting it to 100 so it should be last assertOrderedSearchHits( - client().prepareSearch("test") - .setExplain(randomBoolean()) + prepareSearch("test").setExplain(randomBoolean()) .setQuery( functionScoreQuery( matchAllQuery(), @@ -116,8 +111,7 @@ public void testFieldValueFactor() throws IOException { ); // field is not mapped but we're defaulting it to 100 so all documents should have the same score - SearchResponse response = client().prepareSearch("test") - .setExplain(randomBoolean()) + SearchResponse response = prepareSearch("test").setExplain(randomBoolean()) .setQuery( functionScoreQuery( matchAllQuery(), @@ -132,8 +126,7 @@ public void testFieldValueFactor() throws IOException { // -1 divided by 0 is infinity, which should provoke an exception. try { - response = client().prepareSearch("test") - .setExplain(randomBoolean()) + response = prepareSearch("test").setExplain(randomBoolean()) .setQuery( functionScoreQuery( simpleQueryStringQuery("foo"), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 88e205ce3747c..e32abeb481a2a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -40,7 +40,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -94,7 +94,7 @@ public void testScriptScoresNested() throws IOException { ) ) ).actionGet(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f)); } @@ -110,7 +110,7 @@ public void testScriptScoresWithAgg() throws IOException { searchSource().query(functionScoreQuery(scriptFunction(script))).aggregation(terms("score_agg").script(script)) ) ).actionGet(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getAt(0).getScore(), equalTo(1.0f)); assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getKeyAsString(), equalTo("1.0")); assertThat(((Terms) response.getAggregations().asMap().get("score_agg")).getBuckets().get(0).getDocCount(), is(1L)); @@ -201,7 +201,7 @@ public void testMinScoreFunctionScoreManyDocsAndRandomMinScore() throws IOExcept } protected void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat((int) searchResponse.getHits().getTotalHits().value, 
is(numMatchingDocs)); int pos = 0; for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { @@ -219,7 +219,7 @@ public void testWithEmptyFunctions() throws IOException, ExecutionException, Int SearchResponse termQuery = client().search( new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text"))) ).get(); - assertSearchResponse(termQuery); + assertNoFailures(termQuery); assertThat(termQuery.getHits().getTotalHits().value, equalTo(1L)); float termQueryScore = termQuery.getHits().getAt(0).getScore(); @@ -234,7 +234,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(0.1f)) ) ).get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); @@ -244,7 +244,7 @@ protected void testMinScoreApplied(CombineFunction boostMode, float expectedScor ) ).get(); - assertSearchResponse(response); + assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, equalTo(0L)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 109d391fd17fc..14df03bb86e8d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -49,7 +49,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFourthHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThirdHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; @@ -73,8 +72,7 @@ public void testEnforceWindowSize() { int numShards = getNumShards("test").numPrimaries; for (int j = 0; j < iters; j++) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchAllQuery()) .setRescorer( new QueryRescorerBuilder( functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.weightFactorFunction(100)).boostMode( @@ -85,7 +83,7 @@ public void testEnforceWindowSize() { ) .setSize(randomIntBetween(2, 10)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertFirstHit(searchResponse, hasScore(100.f)); int numDocsWith100AsAScore = 0; for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { @@ -123,8 +121,9 @@ public void testRescorePhrase() throws Exception { .setSource("field1", "quick huge brown", "field2", "the quick lazy huge brown fox jumps over the tree") .get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) + SearchResponse searchResponse = 
prepareSearch().setQuery( + QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR) + ) .setRescorer( new QueryRescorerBuilder(matchPhraseQuery("field1", "quick brown").slop(2).boost(4.0f)).setRescoreQueryWeight(2), 5 @@ -137,8 +136,7 @@ public void testRescorePhrase() throws Exception { assertThat(searchResponse.getHits().getHits()[1].getId(), equalTo("3")); assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("2")); - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(3)), 5) .get(); @@ -147,8 +145,7 @@ public void testRescorePhrase() throws Exception { assertSecondHit(searchResponse, hasId("2")); assertThirdHit(searchResponse, hasId("3")); - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown")), 5) .get(); @@ -192,8 +189,9 @@ public void testMoreDocs() throws Exception { client().prepareIndex("test").setId("11").setSource("field1", "2st street boston massachusetts").get(); client().prepareIndex("test").setId("12").setSource("field1", "3st street boston massachusetts").get(); indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) + SearchResponse searchResponse = prepareSearch().setQuery( + QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR) + ) .setFrom(0) .setSize(5) .setRescorer( @@ -209,8 +207,9 @@ public void testMoreDocs() throws Exception { assertSecondHit(searchResponse, hasId("6")); assertThirdHit(searchResponse, hasId("3")); - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) + searchResponse = prepareSearch().setQuery( + QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR) + ) .setFrom(0) .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -229,8 +228,9 @@ public void testMoreDocs() throws Exception { assertThirdHit(searchResponse, hasId("3")); // Make sure non-zero from works: - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR)) + searchResponse = prepareSearch().setQuery( + QueryBuilders.matchQuery("field1", "lexington avenue massachusetts").operator(Operator.OR) + ) .setFrom(2) .setSize(5) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) @@ -272,8 +272,7 @@ public void testSmallRescoreWindow() throws Exception { client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) .setFrom(0) .setSize(5) .get(); @@ -286,8 +285,7 @@ public 
void testSmallRescoreWindow() throws Exception { assertFourthHit(searchResponse, hasId("2")); // Now, rescore only top 2 hits w/ proximity: - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) .setFrom(0) .setSize(5) .setRescorer( @@ -306,8 +304,7 @@ public void testSmallRescoreWindow() throws Exception { assertFourthHit(searchResponse, hasId("2")); // Now, rescore only top 3 hits w/ proximity: - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts")) .setFrom(0) .setSize(5) .setRescorer( @@ -352,8 +349,7 @@ public void testRescorerMadeScoresWorse() throws Exception { client().prepareIndex("test").setId("2").setSource("field1", "lexington avenue boston massachusetts road").get(); indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) + SearchResponse searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) .get(); @@ -366,8 +362,7 @@ public void testRescorerMadeScoresWorse() throws Exception { assertFourthHit(searchResponse, hasId("2")); // Now, penalizing rescore (nothing matches the rescore query): - searchResponse = client().prepareSearch() - .setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) + searchResponse = prepareSearch().setQuery(QueryBuilders.matchQuery("field1", "massachusetts").operator(Operator.OR)) .setFrom(0) .setSize(5) .setRescorer( @@ -435,8 +430,7 @@ public void testEquivalence() throws Exception { int rescoreWindow = between(1, 3) * resultSize; String intToEnglish = English.intToEnglish(between(0, numDocs - 1)); String query = intToEnglish.split(" ")[0]; - SearchResponse rescored = client().prepareSearch() - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse rescored = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) @@ -449,8 +443,7 @@ public void testEquivalence() throws Exception { ) .get(); - SearchResponse plain = client().prepareSearch() - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse plain = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) @@ -460,8 +453,7 @@ public void testEquivalence() throws Exception { // check equivalence assertEquivalent(query, plain, rescored); - rescored = client().prepareSearch() - .setSearchType(SearchType.QUERY_THEN_FETCH) + rescored = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference("test") // ensure we hit the same shards for tie-breaking .setQuery(QueryBuilders.matchQuery("field1", query).operator(Operator.OR)) .setFrom(0) @@ -503,8 +495,7 @@ public void testExplain() throws Exception { refresh(); { - SearchResponse searchResponse = client().prepareSearch() - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH) 
.setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer( new QueryRescorerBuilder(matchPhraseQuery("field1", "the quick brown").slop(2).boost(4.0f)).setQueryWeight(0.5f) @@ -550,8 +541,7 @@ public void testExplain() throws Exception { innerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[innerMode])); } - SearchResponse searchResponse = client().prepareSearch() - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .setRescorer(innerRescoreQuery, 5) .setExplain(true) @@ -574,8 +564,7 @@ public void testExplain() throws Exception { outerRescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreModes[outerMode])); } - searchResponse = client().prepareSearch() - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) .addRescorer(innerRescoreQuery, 5) .addRescorer(outerRescoreQuery.windowSize(10)) @@ -629,8 +618,7 @@ public void testScoring() throws Exception { rescoreQuery.setScoreMode(QueryRescoreMode.fromString(scoreMode)); } - SearchResponse rescored = client().prepareSearch() - .setPreference("test") // ensure we hit the same shards for tie-breaking + SearchResponse rescored = prepareSearch().setPreference("test") // ensure we hit the same shards for tie-breaking .setFrom(0) .setSize(10) .setQuery(query) @@ -698,7 +686,7 @@ public void testMultipleRescores() throws Exception { ).setScoreMode(QueryRescoreMode.Total); // First set the rescore window large enough that both rescores take effect - SearchRequestBuilder request = client().prepareSearch(); + SearchRequestBuilder request = prepareSearch(); request.addRescorer(eightIsGreat, numDocs).addRescorer(sevenIsBetter, numDocs); SearchResponse response = request.get(); assertFirstHit(response, hasId("7")); @@ -772,7 +760,7 @@ public void testFromSize() throws Exception { } refresh(); - SearchRequestBuilder request = client().prepareSearch(); + SearchRequestBuilder request = prepareSearch(); request.setQuery(QueryBuilders.termQuery("text", "hello")); request.setFrom(1); request.setSize(4); @@ -790,8 +778,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { Exception exc = expectThrows( Exception.class, - () -> client().prepareSearch() - .addSort(SortBuilders.fieldSort("number")) + () -> prepareSearch().addSort(SortBuilders.fieldSort("number")) .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()), 50) .get() @@ -801,8 +788,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { exc = expectThrows( Exception.class, - () -> client().prepareSearch() - .addSort(SortBuilders.fieldSort("number")) + () -> prepareSearch().addSort(SortBuilders.fieldSort("number")) .addSort(SortBuilders.scoreSort()) .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()), 50) @@ -811,8 +797,7 @@ public void testRescorePhaseWithInvalidSort() throws Exception { assertNotNull(exc.getCause()); assertThat(exc.getCause().getMessage(), containsString("Cannot use [sort] option in conjunction with [rescore].")); - SearchResponse resp = client().prepareSearch() - .addSort(SortBuilders.scoreSort()) + SearchResponse resp = prepareSearch().addSort(SortBuilders.scoreSort()) .setTrackScores(true) .addRescorer(new 
QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index ff1db506e73d8..ef8ffcf0d806a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -99,8 +99,7 @@ public void testConsistentHitsWithSameSeed() throws Exception { int innerIters = scaledRandomIntBetween(2, 5); SearchHit[] hits = null; for (int i = 0; i < innerIters; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + SearchResponse searchResponse = prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking .setPreference(preference) .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField("foo"))) .get(); @@ -166,80 +165,70 @@ public void testScoreAccessWithinScript() throws Exception { // Test for accessing _score Script script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score))", params); - SearchResponse resp = client().prepareSearch("test") - .setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) + SearchResponse resp = prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } ) - .get(); + ).get(); assertNoFailures(resp); SearchHit firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.intValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.intValue()))", params); - resp = client().prepareSearch("test") - .setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) + resp = prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } ) - .get(); + ).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.longValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.longValue()))", params); - resp = client().prepareSearch("test") - .setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new 
FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) + resp = prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } ) - .get(); + ).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.floatValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.floatValue()))", params); - resp = client().prepareSearch("test") - .setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) + resp = prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } ) - .get(); + ).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); // Test for accessing _score.doubleValue() script = new Script(ScriptType.INLINE, NAME, "log(doc['index'].value + (factor * _score.doubleValue()))", params); - resp = client().prepareSearch("test") - .setQuery( - functionScoreQuery( - matchQuery("body", "foo"), - new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { - new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), - new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } - ) + resp = prepareSearch("test").setQuery( + functionScoreQuery( + matchQuery("body", "foo"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] { + new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction("index").factor(2)), + new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(script)) } ) - .get(); + ).get(); assertNoFailures(resp); firstHit = resp.getHits().getAt(0); assertThat(firstHit.getScore(), greaterThan(1f)); @@ -254,10 +243,9 @@ public void testSeedReportedInExplain() throws Exception { int seed = 12345678; - SearchResponse resp = client().prepareSearch("test") - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField(SeqNoFieldMapper.NAME))) - .setExplain(true) - .get(); + SearchResponse resp = prepareSearch("test").setQuery( + functionScoreQuery(matchAllQuery(), randomFunction().seed(seed).setField(SeqNoFieldMapper.NAME)) + ).setExplain(true).get(); assertNoFailures(resp); assertEquals(1, resp.getHits().getTotalHits().value); SearchHit firstHit = resp.getHits().getAt(0); @@ -268,13 +256,13 @@ public void testNoDocs() throws Exception { createIndex("test"); ensureGreen(); - SearchResponse resp = client().prepareSearch("test") - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME))) - .get(); + SearchResponse resp = prepareSearch("test").setQuery( + functionScoreQuery(matchAllQuery(), 
randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) + ).get(); assertNoFailures(resp); assertEquals(0, resp.getHits().getTotalHits().value); - resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())).get(); + resp = prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())).get(); assertNoFailures(resp); assertEquals(0, resp.getHits().getTotalHits().value); } @@ -292,8 +280,7 @@ public void testScoreRange() throws Exception { refresh(); int iters = scaledRandomIntBetween(10, 20); for (int i = 0; i < iters; ++i) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(functionScoreQuery(matchAllQuery(), randomFunction())) + SearchResponse searchResponse = prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), randomFunction())) .setSize(docCount) .get(); @@ -314,20 +301,17 @@ public void testSeeds() throws Exception { flushAndRefresh(); assertNoFailures( - client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(randomInt()).setField(SeqNoFieldMapper.NAME))) ); assertNoFailures( - client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking .setQuery(functionScoreQuery(matchAllQuery(), randomFunction().seed(randomLong()).setField(SeqNoFieldMapper.NAME))) ); assertNoFailures( - client().prepareSearch() - .setSize(docCount) // get all docs otherwise we are prone to tie-breaking + prepareSearch().setSize(docCount) // get all docs otherwise we are prone to tie-breaking .setQuery( functionScoreQuery( matchAllQuery(), @@ -354,8 +338,7 @@ public void checkDistribution() throws Exception { for (int i = 0; i < count; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(functionScoreQuery(matchAllQuery(), new RandomScoreFunctionBuilder())) + SearchResponse searchResponse = prepareSearch().setQuery(functionScoreQuery(matchAllQuery(), new RandomScoreFunctionBuilder())) .get(); matrix[Integer.valueOf(searchResponse.getHits().getAt(0).getId())]++; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 2ae9824ab6381..d5bbf767d1046 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -126,24 +126,21 @@ public void testDistanceScript() throws Exception { refresh(); // Test doc['location'].arcDistance(lat, lon) - SearchResponse searchResponse1 = client().prepareSearch() - .addStoredField("_source") + SearchResponse searchResponse1 = prepareSearch().addStoredField("_source") .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance", Collections.emptyMap())) .get(); Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance1, closeTo(GeoUtils.arcDistance(src_lat, src_lon, tgt_lat, tgt_lon), 0.01d)); // Test doc['location'].planeDistance(lat, lon) - SearchResponse searchResponse2 = client().prepareSearch() - .addStoredField("_source") + SearchResponse searchResponse2 = 
prepareSearch().addStoredField("_source") .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "planeDistance", Collections.emptyMap())) .get(); Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue(); assertThat(resultDistance2, closeTo(GeoUtils.planeDistance(src_lat, src_lon, tgt_lat, tgt_lon), 0.01d)); // Test doc['location'].geohashDistance(lat, lon) - SearchResponse searchResponse4 = client().prepareSearch() - .addStoredField("_source") + SearchResponse searchResponse4 = prepareSearch().addStoredField("_source") .addScriptField("distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "geohashDistance", Collections.emptyMap())) .get(); Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue(); @@ -156,8 +153,7 @@ public void testDistanceScript() throws Exception { ); // Test doc['location'].arcDistance(lat, lon + 360)/1000d - SearchResponse searchResponse5 = client().prepareSearch() - .addStoredField("_source") + SearchResponse searchResponse5 = prepareSearch().addStoredField("_source") .addScriptField( "distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance(lat, lon + 360)/1000d", Collections.emptyMap()) @@ -167,8 +163,7 @@ public void testDistanceScript() throws Exception { assertThat(resultArcDistance5, closeTo(GeoUtils.arcDistance(src_lat, src_lon, tgt_lat, tgt_lon) / 1000d, 0.01d)); // Test doc['location'].arcDistance(lat + 360, lon)/1000d - SearchResponse searchResponse6 = client().prepareSearch() - .addStoredField("_source") + SearchResponse searchResponse6 = prepareSearch().addStoredField("_source") .addScriptField( "distance", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "arcDistance(lat + 360, lon)/1000d", Collections.emptyMap()) @@ -194,7 +189,7 @@ public void testGeoDistanceAggregation() throws IOException { refresh(); - SearchRequestBuilder search = client().prepareSearch("test"); + SearchRequestBuilder search = prepareSearch("test"); String name = "TestPosition"; search.setQuery(QueryBuilders.matchAllQuery()) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPointScriptDocValuesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPointScriptDocValuesIT.java index 55994458388a4..90b942c576a82 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPointScriptDocValuesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPointScriptDocValuesIT.java @@ -36,7 +36,7 @@ import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -164,7 +164,7 @@ public void testRandomPoint() throws Exception { .addScriptField("label_lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lat", Collections.emptyMap())) .addScriptField("label_lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lon", Collections.emptyMap())) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); final double qLat = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); final double 
qLon = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon)); @@ -208,7 +208,7 @@ public void testRandomMultiPoint() throws Exception { .addScriptField("label_lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lat", Collections.emptyMap())) .addScriptField("label_lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lon", Collections.emptyMap())) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); for (int i = 0; i < size; i++) { lats[i] = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lats[i])); @@ -247,7 +247,7 @@ public void testNullPoint() throws Exception { .addScriptField("height", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "height", Collections.emptyMap())) .addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap())) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); Map fields = searchResponse.getHits().getHits()[0].getFields(); assertThat(fields.get("lat").getValue(), equalTo(Double.NaN)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 814365595705a..1f46b6603482e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -142,7 +142,7 @@ public void testSimplePolygon() throws Exception { points.add(new GeoPoint(40.8, -74.1)); points.add(new GeoPoint(40.8, -74.0)); points.add(new GeoPoint(40.7, -74.0)); - SearchResponse searchResponse = client().prepareSearch("test") // from NY + SearchResponse searchResponse = prepareSearch("test") // from NY .setQuery(boolQuery().must(geoPolygonQuery("location", points))) .get(); assertHitCount(searchResponse, 4); @@ -158,7 +158,7 @@ public void testSimpleUnclosedPolygon() throws Exception { points.add(new GeoPoint(40.7, -74.1)); points.add(new GeoPoint(40.8, -74.1)); points.add(new GeoPoint(40.8, -74.0)); - SearchResponse searchResponse = client().prepareSearch("test") // from NY + SearchResponse searchResponse = prepareSearch("test") // from NY .setQuery(boolQuery().must(geoPolygonQuery("location", points))) .get(); assertHitCount(searchResponse, 4); @@ -176,7 +176,7 @@ public void testFieldAlias() { points.add(new GeoPoint(40.8, -74.0)); points.add(new GeoPoint(40.7, -74.0)); assertHitCount( - client().prepareSearch("test") // from NY + prepareSearch("test") // from NY .setQuery(boolQuery().must(geoPolygonQuery("alias", points))), 4 ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java index 5a8b506b99296..415de06030938 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/morelikethis/MoreLikeThisIT.java @@ -39,11 +39,11 @@ import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -83,8 +83,7 @@ public void testSimpleMoreLikeThis() throws Exception { logger.info("Running moreLikeThis"); assertHitCount( - client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), 1L ); } @@ -117,8 +116,7 @@ public void testSimpleMoreLikeThisWithTypes() throws Exception { logger.info("Running moreLikeThis"); assertHitCount( - client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), 1L ); } @@ -154,11 +152,10 @@ public void testMoreLikeThisForZeroTokensInOneOfTheAnalyzedFields() throws Excep indicesAdmin().refresh(new RefreshRequest()).actionGet(); assertHitCount( - client().prepareSearch() - .setQuery( - moreLikeThisQuery(new String[] { "myField", "empty" }, null, new Item[] { new Item("test", "1") }).minTermFreq(1) - .minDocFreq(1) - ), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "myField", "empty" }, null, new Item[] { new Item("test", "1") }).minTermFreq(1) + .minDocFreq(1) + ), 1L ); } @@ -180,8 +177,7 @@ public void testSimpleMoreLikeOnLongField() throws Exception { logger.info("Running moreLikeThis"); assertHitCount( - client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), 0L ); } @@ -224,22 +220,21 @@ public void testMoreLikeThisWithAliases() throws Exception { logger.info("Running moreLikeThis on index"); assertHitCount( - client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), 2L ); logger.info("Running moreLikeThis on beta shard"); - SearchResponse response = client().prepareSearch("beta") - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)) - .get(); + SearchResponse response = prepareSearch("beta").setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) + ).get(); assertHitCount(response, 1L); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); logger.info("Running moreLikeThis on release shard"); - response = client().prepareSearch("release") - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)) - .get(); + response = 
prepareSearch("release").setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) + ).get(); assertHitCount(response, 1L); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); @@ -272,9 +267,9 @@ public void testMoreLikeThisWithAliasesInLikeDocuments() throws Exception { ).actionGet(); refresh(indexName); - SearchResponse response = client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1)) - .get(); + SearchResponse response = prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1) + ).get(); assertHitCount(response, 2L); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); } @@ -288,8 +283,8 @@ public void testMoreLikeThisIssue2197() throws Exception { indicesAdmin().prepareRefresh("foo").get(); assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN)); - assertNoFailures(client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") }))); - assertNoFailures(client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") }))); + assertNoFailures(prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") }))); + assertNoFailures(prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") }))); } // Issue #2489 @@ -304,9 +299,7 @@ public void testMoreLikeWithCustomRouting() throws Exception { .get(); indicesAdmin().prepareRefresh("foo").get(); - assertNoFailures( - client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("2") })) - ); + assertNoFailures(prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("2") }))); } // Issue #3039 @@ -320,9 +313,7 @@ public void testMoreLikeThisIssueRoutingNotSerialized() throws Exception { .setRouting("4000") .get(); indicesAdmin().prepareRefresh("foo").get(); - assertNoFailures( - client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("4000") })) - ); + assertNoFailures(prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("4000") }))); } // Issue #3252 @@ -356,90 +347,83 @@ public void testNumericField() throws Exception { // Implicit list of fields -> ignore numeric fields assertHitCount( - client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)), 1L ); // Explicit list of fields including numeric fields -> fail assertRequestBuilderThrows( - client().prepareSearch() - .setQuery( - new MoreLikeThisQueryBuilder(new String[] { "string_value", "int_value" }, null, new Item[] { new Item("test", "1") }) - .minTermFreq(1) - .minDocFreq(1) - ), + prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(new String[] { "string_value", "int_value" }, null, new Item[] { new Item("test", "1") }) + .minTermFreq(1) + .minDocFreq(1) + ), SearchPhaseExecutionException.class ); // mlt query with no field -> exception because _all is not enabled) assertRequestBuilderThrows( - client().prepareSearch().setQuery(moreLikeThisQuery(new String[] { "index" 
}).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery(moreLikeThisQuery(new String[] { "index" }).minTermFreq(1).minDocFreq(1)), SearchPhaseExecutionException.class ); // mlt query with string fields assertHitCount( - client().prepareSearch() - .setQuery(moreLikeThisQuery(new String[] { "string_value" }, new String[] { "index" }, null).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "string_value" }, new String[] { "index" }, null).minTermFreq(1).minDocFreq(1) + ), 2L ); // mlt query with at least a numeric field -> fail by default assertRequestBuilderThrows( - client().prepareSearch() - .setQuery(moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null)), + prepareSearch().setQuery(moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null)), SearchPhaseExecutionException.class ); // mlt query with at least a numeric field -> fail by command assertRequestBuilderThrows( - client().prepareSearch() - .setQuery( - moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null).failOnUnsupportedField( - true - ) - ), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null).failOnUnsupportedField(true) + ), SearchPhaseExecutionException.class ); // mlt query with at least a numeric field but fail_on_unsupported_field set to false assertHitCount( - client().prepareSearch() - .setQuery( - moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null).minTermFreq(1) - .minDocFreq(1) - .failOnUnsupportedField(false) - ), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "string_value", "int_value" }, new String[] { "index" }, null).minTermFreq(1) + .minDocFreq(1) + .failOnUnsupportedField(false) + ), 2L ); // mlt field query on a numeric field -> failure by default assertRequestBuilderThrows( - client().prepareSearch() - .setQuery(moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1).minDocFreq(1)), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1).minDocFreq(1) + ), SearchPhaseExecutionException.class ); // mlt field query on a numeric field -> failure by command assertRequestBuilderThrows( - client().prepareSearch() - .setQuery( - moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1) - .minDocFreq(1) - .failOnUnsupportedField(true) - ), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1) + .minDocFreq(1) + .failOnUnsupportedField(true) + ), SearchPhaseExecutionException.class ); // mlt field query on a numeric field but fail_on_unsupported_field set to false assertHitCount( - client().prepareSearch() - .setQuery( - moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1) - .minDocFreq(1) - .failOnUnsupportedField(false) - ), + prepareSearch().setQuery( + moreLikeThisQuery(new String[] { "int_value" }, new String[] { "42" }, null).minTermFreq(1) + .minDocFreq(1) + .failOnUnsupportedField(false) + ), 0L ); } @@ -471,7 +455,7 @@ public void testMoreLikeThisWithFieldAlias() throws Exception { QueryBuilder query = QueryBuilders.moreLikeThisQuery(new String[] { "alias" }, null, new Item[] { item }) .minTermFreq(1) .minDocFreq(1); - 
assertHitCount(client().prepareSearch().setQuery(query), 1L); + assertHitCount(prepareSearch().setQuery(query), 1L); } public void testSimpleMoreLikeInclude() throws Exception { @@ -510,37 +494,34 @@ public void testSimpleMoreLikeInclude() throws Exception { logger.info("Running More Like This with include true"); assertOrderedSearchHits( - client().prepareSearch() - .setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1) - .minDocFreq(1) - .include(true) - .minimumShouldMatch("0%") - ), + prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1) + .minDocFreq(1) + .include(true) + .minimumShouldMatch("0%") + ), "1", "2" ); assertOrderedSearchHits( - client().prepareSearch() - .setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "2") }).minTermFreq(1) - .minDocFreq(1) - .include(true) - .minimumShouldMatch("0%") - ), + prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "2") }).minTermFreq(1) + .minDocFreq(1) + .include(true) + .minimumShouldMatch("0%") + ), "2", "1" ); logger.info("Running More Like This with include false"); assertSearchHits( - client().prepareSearch() - .setQuery( - new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1) - .minDocFreq(1) - .minimumShouldMatch("0%") - ), + prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1) + .minDocFreq(1) + .minimumShouldMatch("0%") + ), "2" ); } @@ -577,7 +558,7 @@ public void testSimpleMoreLikeThisIds() throws Exception { .include(true) .minTermFreq(1) .minDocFreq(1); - assertHitCount(client().prepareSearch().setQuery(queryBuilder), 3L); + assertHitCount(prepareSearch().setQuery(queryBuilder), 3L); } public void testMoreLikeThisMultiValueFields() throws Exception { @@ -608,9 +589,7 @@ public void testMoreLikeThisMultiValueFields() throws Exception { .minDocFreq(1) .maxQueryTerms(max_query_terms) .minimumShouldMatch("0%"); - SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, max_query_terms); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), max_query_terms); } } @@ -641,8 +620,8 @@ public void testMinimumShouldMatch() throws ExecutionException, InterruptedExcep .minDocFreq(1) .minimumShouldMatch(minimumShouldMatch); logger.info("Testing with minimum_should_match = {}", minimumShouldMatch); - SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").setQuery(mltQuery).get(); + assertNoFailures(response); if (minimumShouldMatch.equals("0%")) { assertHitCount(response, 10); } else { @@ -671,9 +650,7 @@ public void testMoreLikeThisArtificialDocs() throws Exception { .minDocFreq(0) .maxQueryTerms(100) .minimumShouldMatch("100%"); // strict all terms must match! 
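
Reviewer note: the +/- pairs in these hunks apply one mechanical rewrite, folding the old assertSearchResponse/assertHitCount sequence into a single assertHitCountAndNoFailures call on the unexecuted request builder, and dropping the explicit client() receiver in favor of the prepareSearch helper. A minimal sketch of what such a combined helper could look like is below; the method name matches the diff, but the body is an assumption, not the actual ElasticsearchAssertions implementation.

```java
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;

final class AssertionHelperSketch {
    private AssertionHelperSketch() {}

    // Hypothetical body: execute the request once, check shard failures first so
    // a partial result fails loudly, then verify the total hit count.
    static void assertHitCountAndNoFailures(SearchRequestBuilder request, long expectedHitCount) {
        SearchResponse response = request.get();
        assertNoFailures(response);
        assertHitCount(response, expectedHitCount);
    }
}
```
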
- SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 1); } public void testMoreLikeThisMalformedArtificialDocs() throws Exception { @@ -697,16 +674,12 @@ public void testMoreLikeThisMalformedArtificialDocs() throws Exception { MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", malformedFieldDoc) }).minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("0%"); - SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 0); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 0); logger.info("Checking with an empty document ..."); XContentBuilder emptyDoc = jsonBuilder().startObject().endObject(); mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", emptyDoc) }).minTermFreq(0).minDocFreq(0).minimumShouldMatch("0%"); - response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 0); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 0); logger.info("Checking the document matches otherwise ..."); XContentBuilder normalDoc = jsonBuilder().startObject() @@ -716,12 +689,10 @@ public void testMoreLikeThisMalformedArtificialDocs() throws Exception { mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", normalDoc) }).minTermFreq(0) .minDocFreq(0) .minimumShouldMatch("100%"); // strict all terms must match but date is ignored - response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 1); } - public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedException, IOException { + public void testMoreLikeThisUnlike() throws InterruptedException, IOException { createIndex("test"); ensureGreen(); int numFields = randomIntBetween(5, 10); @@ -745,9 +716,7 @@ public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedExcep .minDocFreq(0) .maxQueryTerms(100) .minimumShouldMatch("0%"); - SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, numFields); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), numFields); logger.info("Now check like this doc, but ignore one doc in the index, then two and so on..."); List docs = new ArrayList<>(numFields); @@ -759,10 +728,7 @@ public void testMoreLikeThisUnlike() throws ExecutionException, InterruptedExcep .maxQueryTerms(100) .include(true) .minimumShouldMatch("0%"); - - response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, numFields - (i + 1)); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), numFields - (i + 1)); } } @@ -784,17 +750,13 @@ public void testSelectFields() throws IOException, ExecutionException, Interrupt .minDocFreq(0) .include(true) .minimumShouldMatch("1%"); - SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 2); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 2); mltQuery = moreLikeThisQuery(new String[] { "text" }, null, new Item[] { new 
Item("test", "1") }).minTermFreq(0) .minDocFreq(0) .include(true) .minimumShouldMatch("1%"); - response = client().prepareSearch("test").setQuery(mltQuery).get(); - assertSearchResponse(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(mltQuery), 1); } public void testWithRouting() throws IOException { @@ -811,7 +773,7 @@ public void testWithRouting() throws IOException { ); moreLikeThisQueryBuilder.minTermFreq(1); moreLikeThisQueryBuilder.minDocFreq(1); - SearchResponse searchResponse = client().prepareSearch("index").setQuery(moreLikeThisQueryBuilder).get(); + SearchResponse searchResponse = prepareSearch("index").setQuery(moreLikeThisQueryBuilder).get(); assertEquals(2, searchResponse.getHits().getTotalHits().value); } @@ -842,9 +804,9 @@ public void testWithMissingRouting() throws IOException { logger.info("Running moreLikeThis with one item without routing attribute"); SearchPhaseExecutionException exception = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch() - .setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1)) - .get() + () -> prepareSearch().setQuery( + new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1) + ).get() ); Throwable cause = exception.getCause(); @@ -856,14 +818,12 @@ public void testWithMissingRouting() throws IOException { logger.info("Running moreLikeThis with one item with routing attribute and two items without routing attribute"); SearchPhaseExecutionException exception = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch() - .setQuery( - new MoreLikeThisQueryBuilder( - null, - new Item[] { new Item("test", "1").routing("1"), new Item("test", "2"), new Item("test", "3") } - ).minTermFreq(1).minDocFreq(1) - ) - .get() + () -> prepareSearch().setQuery( + new MoreLikeThisQueryBuilder( + null, + new Item[] { new Item("test", "1").routing("1"), new Item("test", "2"), new Item("test", "3") } + ).minTermFreq(1).minDocFreq(1) + ).get() ); Throwable cause = exception.getCause(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java index 6b3193c4b383c..aa418288b8ebf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/msearch/MultiSearchIT.java @@ -43,9 +43,9 @@ public void testSimpleMultiSearch() { client().prepareIndex("test").setId("2").setSource("field", "yyy").get(); refresh(); MultiSearchResponse response = client().prepareMultiSearch() - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "yyy"))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "yyy"))) + .add(prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .get(); for (MultiSearchResponse.Item item : response) { @@ -73,7 +73,7 @@ public void testSimpleMultiSearchMoreRequests() { request.maxConcurrentSearchRequests(randomIntBetween(1, numSearchRequests)); } for (int i = 0; i < numSearchRequests; i++) { - 
request.add(client().prepareSearch("test")); + request.add(prepareSearch("test")); } MultiSearchResponse response = client().multiSearch(request).actionGet(); @@ -96,9 +96,9 @@ public void testCCSCheckCompatibility() throws Exception { client().prepareIndex("test").setId("2").setSource("field", "yyy").get(); refresh(); MultiSearchResponse response = client().prepareMultiSearch() - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) - .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "yyy"))) - .add(client().prepareSearch("test").setQuery(new DummyQueryBuilder() { + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "xxx"))) + .add(prepareSearch("test").setQuery(QueryBuilders.termQuery("field", "yyy"))) + .add(prepareSearch("test").setQuery(new DummyQueryBuilder() { @Override public TransportVersion getMinimalSupportedVersion() { return transportVersion; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/NestedWithMinScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/NestedWithMinScoreIT.java index ce1b4bf30d498..e238a254b7843 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/NestedWithMinScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/NestedWithMinScoreIT.java @@ -111,6 +111,6 @@ public void testNestedWithMinScore() throws Exception { if (randomBoolean()) { source.trackTotalHitsUpTo(randomBoolean() ? Integer.MAX_VALUE : randomIntBetween(1, 1000)); } - ElasticsearchAssertions.assertSearchHits(client().prepareSearch("test").setSource(source), "d1"); + ElasticsearchAssertions.assertSearchHits(prepareSearch("test").setSource(source), "d1"); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 598c65b8c999d..736796d73f164 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -51,9 +51,9 @@ public void testSimpleNested() throws Exception { ensureGreen(); // check on no data, see it works - SearchResponse searchResponse = client().prepareSearch("test").get(); + SearchResponse searchResponse = prepareSearch("test").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); client().prepareIndex("test") @@ -83,24 +83,22 @@ public void testSimpleNested() throws Exception { // check the numDocs assertDocumentCount("test", 3); - searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); // search for something that matches the nested doc, and see that we don't find the nested doc - searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); + searchResponse = prepareSearch("test").setQuery(matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - 
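
Context for the SimpleNestedIT changes that follow: nested documents are indexed as hidden children, so a flat term query on a nested field finds nothing, and only a nested query joins child matches back to their root document. A short sketch under that assumption; the index name, field names, and single-document fixture mirror the test, while the class wrapper is illustrative only.

```java
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.index.query.QueryBuilders.nestedQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

public class NestedVisibilitySketchIT extends ESIntegTestCase {
    public void testNestedDocsNeedNestedQuery() {
        // Flat query on a nested field: the hidden child docs are not visible.
        SearchResponse flat = prepareSearch("test").setQuery(termQuery("nested1.n_field1", "n_value1_1")).get();
        // nestedQuery(path, innerQuery, scoreMode) runs against the children and
        // returns their root documents; ScoreMode.Avg averages child scores.
        SearchResponse joined = prepareSearch("test")
            .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg))
            .get();
        // Counts assume the test's fixture: one root doc with one nested child.
        assertEquals(0, flat.getHits().getTotalHits().value);
        assertEquals(1, joined.getHits().getTotalHits().value);
    }
}
```
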
searchResponse = client().prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("n_field1", "n_value1_1")).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); // now, do a nested query - searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) + searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) + searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .get(); assertNoFailures(searchResponse); @@ -130,44 +128,38 @@ public void testSimpleNested() throws Exception { refresh(); assertDocumentCount("test", 6); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); // filter - searchResponse = client().prepareSearch("test") - .setQuery( - boolQuery().must(matchAllQuery()) - .mustNot( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()) + .mustNot( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg ) - ) - .get(); + ) + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); // check with type prefix - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.n_field1", "n_value1_1")).must(termQuery("nested1.n_field2", "n_value2_1")), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -178,8 +170,7 @@ public void testSimpleNested() throws Exception { refresh(); assertDocumentCount("test", 3); - searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) + searchResponse = prepareSearch("test").setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)) .get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); @@ -247,93 +238,81 @@ public void 
testMultiNested() throws Exception { assertDocumentCount("test", 7); // do some multi nested queries - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.field1", "1"), ScoreMode.Avg)) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + nestedQuery("nested1", termQuery("nested1.field1", "1"), ScoreMode.Avg) + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)) - .get(); + searchResponse = prepareSearch("test").setQuery( + nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg) + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "3"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "3"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "4"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "4"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "1")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "1")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = client().prepareSearch("test") - .setQuery( - 
nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "4")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "4")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "5"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test") - .setQuery( - nestedQuery( - "nested1", - boolQuery().must(termQuery("nested1.field1", "4")) - .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), - ScoreMode.Avg - ) + searchResponse = prepareSearch("test").setQuery( + nestedQuery( + "nested1", + boolQuery().must(termQuery("nested1.field1", "4")) + .must(nestedQuery("nested1.nested2", termQuery("nested1.nested2.field2", "2"), ScoreMode.Avg)), + ScoreMode.Avg ) - .get(); + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); } @@ -442,10 +421,9 @@ public void testExplain() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) - .setExplain(true) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total) + ).setExplain(true).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); Explanation explanation = searchResponse.getHits().getHits()[0].getExplanation(); @@ -526,7 +504,7 @@ public void testSimpleNestedSorting() throws Exception { .get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") + SearchResponse searchResponse = prepareSearch("test") .setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedSort(new NestedSortBuilder("nested1"))) @@ -540,7 +518,7 @@ public void testSimpleNestedSorting() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4")); - searchResponse = client().prepareSearch("test") + searchResponse = prepareSearch("test") .setQuery(QueryBuilders.matchAllQuery()) .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedSort(new NestedSortBuilder("nested1"))) @@ -638,8 +616,7 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { .get(); refresh(); - SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + SearchRequestBuilder searchRequestBuilder = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .addSort( SortBuilders.fieldSort("nested1.field1") .setNestedSort(new NestedSortBuilder("nested1").setFilter(termQuery("nested1.field2", true))) @@ -661,8 +638,7 @@ public void testSimpleNestedSortingWithNestedFilterMissing() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10")); - searchRequestBuilder = 
client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + searchRequestBuilder = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .addSort( SortBuilders.fieldSort("nested1.field1") .setNestedSort(new NestedSortBuilder("nested1").setFilter(termQuery("nested1.field2", true))) @@ -812,8 +788,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { refresh(); // access id = 1, read, max value, asc, should use matt and shay - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("acl.operation.user.username") .setNestedSort( @@ -836,8 +811,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("shay")); // access id = 1, read, min value, asc, should now use adrien and luca - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("acl.operation.user.username") .setNestedSort( @@ -860,8 +834,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("luca")); // execute, by matt or luca, by user id, sort missing first - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("acl.operation.user.id") .setNestedSort( @@ -887,8 +860,7 @@ public void testNestedSortWithMultiLevelFiltering() throws Exception { assertThat(searchResponse.getHits().getHits()[1].getSortValues()[0].toString(), equalTo("1")); // execute, by matt or luca, by username, sort missing last (default) - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("acl.operation.user.username") .setNestedSort( @@ -972,8 +944,7 @@ public void testLeakingSortValues() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(termQuery("_id", 2)) + SearchResponse searchResponse = prepareSearch().setQuery(termQuery("_id", 2)) .addSort( SortBuilders.fieldSort("nested1.nested2.sortVal") .setNestedSort( @@ -1155,8 +1126,7 @@ public void testSortNestedWithNestedFilter() throws Exception { refresh(); // Without nested filter - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child")) @@ -1175,8 +1145,7 @@ public void testSortNestedWithNestedFilter() throws Exception { // With nested filter NestedSortBuilder nestedSort = new NestedSortBuilder("parent.child"); nestedSort.setFilter(QueryBuilders.termQuery("parent.child.filter", true)); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)) .get(); assertHitCount(searchResponse, 3); @@ -1189,8 +1158,7 @@ public void testSortNestedWithNestedFilter() throws Exception { 
assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); // Nested path should be automatically detected, expect same results as above search request - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("parent.child.child_values").setNestedSort(nestedSort).order(SortOrder.ASC)) .get(); @@ -1204,8 +1172,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); nestedSort.setFilter(QueryBuilders.termQuery("parent.filter", false)); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("parent.parent_values").setNestedSort(nestedSort).order(SortOrder.ASC)) .get(); @@ -1218,8 +1185,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("3")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort( @@ -1241,8 +1207,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("6")); // Check if closest nested type is resolved - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_obj.value") .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) @@ -1260,8 +1225,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); // Sort mode: sum - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child")) @@ -1279,8 +1243,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("11")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child")) @@ -1299,8 +1262,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("2")); // Sort mode: sum with filter - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) @@ -1319,8 +1281,7 @@ public void testSortNestedWithNestedFilter() throws Exception { 
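
For the sum/avg hunks here: when several child values survive the nested filter, sortMode decides how they collapse into one sort key per root document. A brief sketch under the same assumed parent.child mapping, again with an illustrative class wrapper.

```java
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortMode;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

public class NestedSortModeSketchIT extends ESIntegTestCase {
    public void testSumOfChildValuesAsSortKey() {
        prepareSearch()
            .setQuery(matchAllQuery())
            .addSort(
                SortBuilders.fieldSort("parent.child.child_values")
                    .setNestedSort(new NestedSortBuilder("parent.child"))
                    // SUM adds all child values per root; AVG, MIN, and MAX
                    // collapse them in the corresponding way.
                    .sortMode(SortMode.SUM)
                    .order(SortOrder.ASC)
            )
            .get();
    }
}
```
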
assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); // Sort mode: avg - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child")) @@ -1338,8 +1299,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getId(), equalTo("1")); assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("3")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child")) @@ -1358,8 +1318,7 @@ public void testSortNestedWithNestedFilter() throws Exception { assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("1")); // Sort mode: avg with filter - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("parent.child.child_values") .setNestedSort(new NestedSortBuilder("parent.child").setFilter(QueryBuilders.termQuery("parent.child.filter", true))) @@ -1523,8 +1482,9 @@ public void testNestedSortingWithNestedFilterAsFilter() throws Exception { assertTrue(indexResponse2.getShardInfo().getSuccessful() > 0); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("users.first").setNestedSort(new NestedSortBuilder("users")).order(SortOrder.ASC)) + SearchResponse searchResponse = prepareSearch("test").addSort( + SortBuilders.fieldSort("users.first").setNestedSort(new NestedSortBuilder("users")).order(SortOrder.ASC) + ) .addSort( SortBuilders.fieldSort("users.first") .order(SortOrder.ASC) @@ -1586,9 +1546,9 @@ public void testCheckFixedBitSetCache() throws Exception { assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0L)); // only when querying with nested the fixed bitsets are loaded - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(nestedQuery("array1", termQuery("array1.field1", "value1"), ScoreMode.Avg)) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + nestedQuery("array1", termQuery("array1.field1", "value1"), ScoreMode.Avg) + ).get(); assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java index 015dc9628de21..9219641f1d3bf 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/VectorNestedIT.java @@ -66,10 +66,9 @@ public void testSimpleNested() throws Exception { assertThat(getResponse.getSourceAsBytes(), notNullValue()); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setKnnSearch(List.of(new KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null))) - .setAllowPartialSearchResults(false) - .get(); + SearchResponse searchResponse = prepareSearch("test").setKnnSearch( + List.of(new 
KnnSearchBuilder("nested.vector", new float[] { 1, 1, 1 }, 1, 1, null)) + ).setAllowPartialSearchResults(false).get(); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java index d89f39a1452b6..526d523bb0638 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/aggregation/AggregationProfilerIT.java @@ -42,7 +42,7 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -96,7 +96,6 @@ protected void setupSuiteScopeCluster() throws Exception { indicesAdmin().prepareCreate("idx") .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) .setMapping(STRING_FIELD, "type=keyword", NUMBER_FIELD, "type=integer", TAG_FIELD, "type=keyword") - .get() ); List builders = new ArrayList<>(); @@ -123,11 +122,10 @@ protected void setupSuiteScopeCluster() throws Exception { } public void testSimpleProfile() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(true) + SearchResponse response = prepareSearch("idx").setProfile(true) .addAggregation(histogram("histo").field(NUMBER_FIELD).interval(1L)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); @@ -159,8 +157,7 @@ public void testSimpleProfile() { } public void testMultiLevelProfile() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(true) + SearchResponse response = prepareSearch("idx").setProfile(true) .addAggregation( histogram("histo").field(NUMBER_FIELD) .interval(1L) @@ -171,7 +168,7 @@ public void testMultiLevelProfile() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); @@ -246,8 +243,7 @@ private void assertRemapTermsDebugInfo(ProfileResult termsAggResult, String... 
d } public void testMultiLevelProfileBreadthFirst() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(true) + SearchResponse response = prepareSearch("idx").setProfile(true) .addAggregation( histogram("histo").field(NUMBER_FIELD) .interval(1L) @@ -258,7 +254,7 @@ public void testMultiLevelProfileBreadthFirst() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); @@ -320,8 +316,7 @@ public void testMultiLevelProfileBreadthFirst() { } public void testDiversifiedAggProfile() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(true) + SearchResponse response = prepareSearch("idx").setProfile(true) .addAggregation( diversifiedSampler("diversify").shardSize(10) .field(STRING_FIELD) @@ -329,7 +324,7 @@ public void testDiversifiedAggProfile() { .subAggregation(max("max").field(NUMBER_FIELD)) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); @@ -376,8 +371,7 @@ public void testDiversifiedAggProfile() { } public void testComplexProfile() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(true) + SearchResponse response = prepareSearch("idx").setProfile(true) .addAggregation( histogram("histo").field(NUMBER_FIELD) .interval(1L) @@ -398,7 +392,7 @@ public void testComplexProfile() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("idx").numPrimaries)); @@ -593,8 +587,7 @@ public void testComplexProfile() { } public void testNoProfile() { - SearchResponse response = client().prepareSearch("idx") - .setProfile(false) + SearchResponse response = prepareSearch("idx").setProfile(false) .addAggregation( histogram("histo").field(NUMBER_FIELD) .interval(1L) @@ -615,7 +608,7 @@ public void testNoProfile() { ) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(0)); @@ -633,7 +626,6 @@ public void testFilterByFilter() throws InterruptedException, IOException { indicesAdmin().prepareCreate("dateidx") .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) .setMapping("date", "type=date") - .get() ); List builders = new ArrayList<>(); for (int i = 0; i < RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 2; i++) { @@ -642,8 +634,7 @@ public void testFilterByFilter() throws InterruptedException, IOException { } indexRandom(true, false, builders); - SearchResponse response = client().prepareSearch("dateidx") - .setProfile(true) + SearchResponse response = prepareSearch("dateidx").setProfile(true) .addAggregation( new DateHistogramAggregationBuilder("histo").field("date") .calendarInterval(DateHistogramInterval.MONTH) @@ -652,7 +643,7 @@ public void testFilterByFilter() throws InterruptedException, IOException { .subAggregation(new MaxAggregationBuilder("m").field("date")) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); 
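
The AggregationProfilerIT hunks follow one template: run the aggregation with setProfile(true), swap assertSearchResponse for assertNoFailures, and read one profile entry per shard from getProfileResults(). A sketch of that flow; the index and field names are placeholders standing in for the test's constants, and the class wrapper is illustrative.

```java
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;

public class AggProfileSketchIT extends ESIntegTestCase {
    public void testProfiledHistogram() {
        SearchResponse response = prepareSearch("idx").setProfile(true)
            .addAggregation(histogram("histo").field("number").interval(1L))
            .get();
        assertNoFailures(response);
        // Keyed by a composite shard identifier; one entry per shard that ran.
        var profileResults = response.getProfileResults();
        assertEquals(getNumShards("idx").numPrimaries, profileResults.size());
    }
}
```
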
assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("dateidx").numPrimaries)); @@ -708,7 +699,6 @@ public void testDateHistogramFilterByFilterDisabled() throws InterruptedExceptio indicesAdmin().prepareCreate("date_filter_by_filter_disabled") .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) .setMapping("date", "type=date", "keyword", "type=keyword") - .get() ); List builders = new ArrayList<>(); for (int i = 0; i < RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 2; i++) { @@ -720,11 +710,10 @@ public void testDateHistogramFilterByFilterDisabled() throws InterruptedExceptio } indexRandom(true, false, builders); - SearchResponse response = client().prepareSearch("date_filter_by_filter_disabled") - .setProfile(true) + SearchResponse response = prepareSearch("date_filter_by_filter_disabled").setProfile(true) .addAggregation(new DateHistogramAggregationBuilder("histo").field("date").calendarInterval(DateHistogramInterval.MONTH)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Map profileResults = response.getProfileResults(); assertThat(profileResults, notNullValue()); assertThat(profileResults.size(), equalTo(getNumShards("date_filter_by_filter_disabled").numPrimaries)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java index fce19a316b34a..f7b2b0f4443d3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/dfs/DfsProfilerIT.java @@ -67,8 +67,7 @@ public void testProfileDfs() throws Exception { for (int i = 0; i < iters; i++) { QueryBuilder q = randomQueryBuilder(List.of(textField), List.of(numericField), numDocs, 3); logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch() - .setQuery(q) + SearchResponse resp = prepareSearch().setQuery(q) .setTrackTotalHits(true) .setProfile(true) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index b3fb2cb93713f..e7b02faede9b1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -63,8 +63,7 @@ public void testProfileQuery() throws Exception { QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch() - .setQuery(q) + SearchResponse resp = prepareSearch().setQuery(q) .setTrackTotalHits(true) .setProfile(true) .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -115,15 +114,13 @@ public void testProfileMatchesRegular() throws Exception { QueryBuilder q = randomQueryBuilder(stringFields, numericFields, numDocs, 3); logger.debug("Query: {}", q); - SearchRequestBuilder vanilla = client().prepareSearch("test") - .setQuery(q) + SearchRequestBuilder vanilla = prepareSearch("test").setQuery(q) .setProfile(false) .addSort("id.keyword", SortOrder.ASC) .setSearchType(SearchType.QUERY_THEN_FETCH) .setRequestCache(false); - SearchRequestBuilder profile = client().prepareSearch("test") - .setQuery(q) + SearchRequestBuilder profile = 
prepareSearch("test").setQuery(q) .setProfile(true) .addSort("id.keyword", SortOrder.ASC) .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -188,7 +185,7 @@ public void testSimpleMatch() throws Exception { QueryBuilder q = QueryBuilders.matchQuery("field1", "one"); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); Map p = resp.getProfileResults(); assertNotNull(p); @@ -229,7 +226,7 @@ public void testBool() throws Exception { .must(QueryBuilders.matchQuery("field1", "one")) .must(QueryBuilders.matchQuery("field1", "two")); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); Map p = resp.getProfileResults(); assertNotNull(p); @@ -290,7 +287,7 @@ public void testEmptyBool() throws Exception { QueryBuilder q = QueryBuilders.boolQuery(); logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); @@ -335,7 +332,7 @@ public void testCollapsingBool() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); @@ -375,7 +372,7 @@ public void testBoosting() throws Exception { .negativeBoost(randomFloat()); logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); @@ -415,7 +412,7 @@ public void testDisMaxRange() throws Exception { .add(QueryBuilders.rangeQuery("field2").from(null).to(73).includeLower(true).includeUpper(true)); logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); @@ -454,7 +451,7 @@ public void testRange() throws Exception { logger.info("Query: {}", q.toString()); - SearchResponse resp = 
client().prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(true).setSearchType(SearchType.QUERY_THEN_FETCH).get(); assertNotNull("Profile response element should not be null", resp.getProfileResults()); assertThat("Profile response should not be an empty array", resp.getProfileResults().size(), not(0)); @@ -495,8 +492,7 @@ public void testPhrase() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch() - .setQuery(q) + SearchResponse resp = prepareSearch().setQuery(q) .setIndices("test") .setProfile(true) .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -547,7 +543,7 @@ public void testNoProfile() throws Exception { logger.info("Query: {}", q); - SearchResponse resp = client().prepareSearch().setQuery(q).setProfile(false).get(); + SearchResponse resp = prepareSearch().setQuery(q).setProfile(false).get(); assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index dd3e160054f6b..099100a7a67e3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -31,15 +31,14 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; public class ExistsIT extends ESIntegTestCase { // TODO: move this to a unit test somewhere... 
public void testEmptyIndex() throws Exception { createIndex("test"); - assertNoFailures(client().prepareSearch("test").setQuery(QueryBuilders.existsQuery("foo"))); - assertNoFailures(client().prepareSearch("test").setQuery(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("foo")))); + assertNoFailures(prepareSearch("test").setQuery(QueryBuilders.existsQuery("foo"))); + assertNoFailures(prepareSearch("test").setQuery(QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("foo")))); } public void testExists() throws Exception { @@ -114,15 +113,15 @@ public void testExists() throws Exception { expected.put("vec", 2); final long numDocs = sources.length; - SearchResponse allDocs = client().prepareSearch("idx").setSize(sources.length).get(); - assertSearchResponse(allDocs); + SearchResponse allDocs = prepareSearch("idx").setSize(sources.length).get(); + assertNoFailures(allDocs); assertHitCount(allDocs, numDocs); for (Map.Entry entry : expected.entrySet()) { final String fieldName = entry.getKey(); final int count = entry.getValue(); // exists - SearchResponse resp = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); - assertSearchResponse(resp); + SearchResponse resp = prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); + assertNoFailures(resp); try { assertEquals( String.format( @@ -200,8 +199,8 @@ public void testFieldAlias() throws Exception { String fieldName = entry.getKey(); int expectedCount = entry.getValue(); - SearchResponse response = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.existsQuery(fieldName)).get(); + assertNoFailures(response); assertHitCount(response, expectedCount); } } @@ -232,8 +231,8 @@ public void testFieldAliasWithNoDocValues() throws Exception { indexRequests.add(client().prepareIndex("idx").setSource("foo", 43)); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setQuery(QueryBuilders.existsQuery("foo-alias")).get(); + assertNoFailures(response); assertHitCount(response, 2); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java index c2ccfe4ee9694..1e18c0ca3c59c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/IntervalQueriesIT.java @@ -56,11 +56,9 @@ public void testEmptyIntervalsWithNestedMappings() throws InterruptedException { client().prepareIndex("nested").setId("3").setSource("text", "quick") ); - SearchResponse resp = client().prepareSearch("nested") - .setQuery( - new IntervalQueryBuilder("empty_text", new IntervalsSourceProvider.Match("an empty query", 0, true, null, null, null)) - ) - .get(); + SearchResponse resp = prepareSearch("nested").setQuery( + new IntervalQueryBuilder("empty_text", new IntervalsSourceProvider.Match("an empty query", 0, true, null, null, null)) + ).get(); assertEquals(0, resp.getFailedShards()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 9e55c38336c1b..f251ab5cb6269 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -53,7 +53,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.hamcrest.Matchers.anyOf; @@ -267,13 +267,11 @@ private XContentBuilder createMapping() throws IOException { public void testDefaults() throws ExecutionException, InterruptedException { MatchQueryParser.Type type = MatchQueryParser.Type.BOOLEAN; - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) - ) + SearchResponse searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) ) - .get(); + ).get(); Set topNIds = Sets.newHashSet("theone", "theother"); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { topNIds.remove(searchResponse.getHits().getAt(i).getId()); @@ -283,91 +281,78 @@ public void testDefaults() throws ExecutionException, InterruptedException { assertThat(topNIds, empty()); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) - .type(type) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").operator(Operator.OR) + .type(type) ) - .get(); + ).get(); assertFirstHit(searchResponse, anyOf(hasId("theone"), hasId("theother"))); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").operator(Operator.OR).type(type) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").operator(Operator.OR).type(type) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theother")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", 
"first_name", "last_name", "category").operator(Operator.AND).type(type) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").operator(Operator.AND).type(type) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); } public void testPhraseType() { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") - .operator(Operator.OR) - .type(MatchQueryParser.Type.PHRASE) - ) + SearchResponse searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("Man the Ultimate", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase") + .operator(Operator.OR) + .type(MatchQueryParser.Type.PHRASE) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("ultimate2")); assertHitCount(searchResponse, 1L); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase").operator( - Operator.OR - ).type(MatchQueryParser.Type.PHRASE) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("Captain", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase").operator( + Operator.OR + ).type(MatchQueryParser.Type.PHRASE) ) - .get(); + ).get(); assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(1L)); - searchResponse = client().prepareSearch("test") - .setQuery( + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery( randomizeType( multiMatchQuery("the Ul", "full_name_phrase", "first_name_phrase", "last_name_phrase", "category_phrase").operator( Operator.OR ).type(MatchQueryParser.Type.PHRASE_PREFIX) ) - ) - .get(); - assertSearchHits(searchResponse, "ultimate2", "ultimate1"); - assertHitCount(searchResponse, 2L); + ), + "ultimate2", + "ultimate1" + ); } public void testSingleField() throws NoSuchFieldException, IllegalAccessException { - SearchResponse searchResponse = client().prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill"))).get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill"))).get(); assertNoFailures(searchResponse); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("15", "skill", "int-field")).analyzer("category")) + searchResponse = prepareSearch("test").setQuery(randomizeType(multiMatchQuery("15", "skill", "int-field")).analyzer("category")) .get(); assertNoFailures(searchResponse); assertFirstHit(searchResponse, hasId("theone")); @@ -408,7 +393,7 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio builder.append(RandomPicks.randomFrom(random(), query)).append(" "); } MultiMatchQueryBuilder multiMatchQueryBuilder = randomizeType(multiMatchQuery(builder.toString(), field)); - SearchResponse multiMatchResp = 
client().prepareSearch("test") + SearchResponse multiMatchResp = prepareSearch("test") // id sort field is a tie, in case hits have the same score, // the hits will be sorted the same consistently .addSort("_score", SortOrder.DESC) @@ -417,7 +402,7 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio .get(); MatchQueryBuilder matchQueryBuilder = QueryBuilders.matchQuery(field, builder.toString()); - SearchResponse matchResp = client().prepareSearch("test") + SearchResponse matchResp = prepareSearch("test") // id tie sort .addSort("_score", SortOrder.DESC) .addSort("id", SortOrder.ASC) @@ -442,7 +427,7 @@ public void testSingleField() throws NoSuchFieldException, IllegalAccessExceptio public void testEquivalence() { - final int numDocs = (int) client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + final int numDocs = (int) prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value; int numIters = scaledRandomIntBetween(5, 10); for (int i = 0; i < numIters; i++) { { @@ -450,15 +435,13 @@ public void testEquivalence() { MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("marvel hero captain america", "*_name", randomBoolean() ? "category" : "categ*"); - SearchResponse left = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse left = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery(randomizeType(multiMatchQueryBuilder.operator(Operator.OR).type(type))) .get(); - SearchResponse right = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse right = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -478,8 +461,7 @@ public void testEquivalence() { MultiMatchQueryBuilder multiMatchQueryBuilder = randomBoolean() ? multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category") : multiMatchQuery("captain america", "*_name", randomBoolean() ? "category" : "categ*"); - SearchResponse left = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse left = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -487,8 +469,7 @@ public void testEquivalence() { ) .get(); - SearchResponse right = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse right = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -508,8 +489,7 @@ public void testEquivalence() { { String minShouldMatch = randomBoolean() ? null : "" + between(0, 1); - SearchResponse left = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse left = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -521,8 +501,7 @@ public void testEquivalence() { ) .get(); - SearchResponse right = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse right = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -539,8 +518,7 @@ public void testEquivalence() { String minShouldMatch = randomBoolean() ? 
null : "" + between(0, 1); SearchResponse left; if (randomBoolean()) { - left = client().prepareSearch("test") - .setSize(numDocs) + left = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -552,8 +530,7 @@ public void testEquivalence() { ) .get(); } else { - left = client().prepareSearch("test") - .setSize(numDocs) + left = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -565,8 +542,7 @@ public void testEquivalence() { ) .get(); } - SearchResponse right = client().prepareSearch("test") - .setSize(numDocs) + SearchResponse right = prepareSearch("test").setSize(numDocs) .addSort(SortBuilders.scoreSort()) .addSort(SortBuilders.fieldSort("id")) .setQuery( @@ -583,208 +559,175 @@ public void testEquivalence() { } public void testCrossFieldMode() throws ExecutionException, InterruptedException { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).operator(Operator.OR) - ) + SearchResponse searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", "first_name", "last_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .operator(Operator.OR) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).operator(Operator.OR) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero captain america", "full_name", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).operator(Operator.OR) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theother")); assertSecondHit(searchResponse, hasId("theone")); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).operator(Operator.OR) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("marvel hero", "full_name", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).operator(Operator.OR) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theother")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america", "full_name", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america 15", "full_name", "first_name", 
"last_name", "category", "skill").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).analyzer("category").lenient(true).operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).analyzer("category").lenient(true).operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).analyzer("category").lenient(true).operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america 15", "full_name", "first_name", "last_name", "category", "skill", "int-field").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).analyzer("category").lenient(true).operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).analyzer("category").lenient(true).operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america 15", "skill", "full_name", "first_name", "last_name", "category", "int-field").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).analyzer("category").lenient(true).operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america 15", "first_name", "last_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .lenient(true) - .analyzer("category") - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america 15", "first_name", "last_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .lenient(true) + .analyzer("category") ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")) + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("25 15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category"))) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("25 15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category")) + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("25 15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category") - ) + 
searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("25 15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).analyzer("category") ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("25 15", "first_name", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .analyzer("category") - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("25 15", "first_name", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category") ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("25 15", "int-field", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .analyzer("category") - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("25 15", "int-field", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category") ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("25 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .analyzer("category") - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("25 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .analyzer("category") ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).analyzer("category").operator(Operator.OR) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).analyzer("category").operator(Operator.OR) ) - .get(); + ).get(); assertFirstHit(searchResponse, hasId("theone")); // test group based on analyzer -- all fields are grouped into a cross field search - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).analyzer("category").operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).analyzer("category").operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); // counter example assertHitCount( - client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( - randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE - ).operator(Operator.AND) - ) - ), + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( + randomBoolean() ? 
MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE + ).operator(Operator.AND) + ) + ), 0L ); // counter example assertHitCount( - client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( - randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE - ).operator(Operator.AND) - ) - ), + prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("captain america marvel hero", "first_name", "last_name", "category").type( + randomBoolean() ? MultiMatchQueryBuilder.Type.CROSS_FIELDS : MultiMatchQueryBuilder.DEFAULT_TYPE + ).operator(Operator.AND) + ) + ), 0L ); // test if boosts work - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("the ultimate", "full_name", "first_name", "category").field("last_name", 10) - .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("the ultimate", "full_name", "first_name", "category").field("last_name", 10) + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) + .operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("ultimate1")); // has ultimate in the last_name and that is boosted assertSecondHit(searchResponse, hasId("ultimate2")); @@ -792,55 +735,47 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException // since we try to treat the matching fields as one field scores are very similar but we have a small bias towards the // more frequent field that acts as a tie-breaker internally - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").type( - MultiMatchQueryBuilder.Type.CROSS_FIELDS - ).operator(Operator.AND) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("the ultimate", "full_name", "first_name", "last_name", "category").type( + MultiMatchQueryBuilder.Type.CROSS_FIELDS + ).operator(Operator.AND) ) - .get(); + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("ultimate2")); assertSecondHit(searchResponse, hasId("ultimate1")); assertThat(searchResponse.getHits().getHits()[0].getScore(), greaterThan(searchResponse.getHits().getHits()[1].getScore())); // Test group based on numeric fields - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("15", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("15", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("15", "skill", "first_name").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); // Two numeric fields together caused trouble at one point! 
- searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS))) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("15", "int-field", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) - ) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)) + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("theone")); - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("alpha 15", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true) - ) - ) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("alpha 15", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true)) + ).get(); /* * Doesn't find the one because "alpha 15" isn't a number and we don't * break on spaces. @@ -849,21 +784,18 @@ public void testCrossFieldMode() throws ExecutionException, InterruptedException assertFirstHit(searchResponse, hasId("ultimate1")); // Lenient wasn't always properly lenient with two numeric fields - searchResponse = client().prepareSearch("test") - .setQuery( - randomizeType( - multiMatchQuery("alpha 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS) - .lenient(true) - ) + searchResponse = prepareSearch("test").setQuery( + randomizeType( + multiMatchQuery("alpha 15", "int-field", "first_name", "skill").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).lenient(true) ) - .get(); + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("ultimate1")); // Check that cross fields works with date fields - searchResponse = client().prepareSearch("test") - .setQuery(randomizeType(multiMatchQuery("now", "f*", "date").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).lenient(true)) - .get(); + searchResponse = prepareSearch("test").setQuery( + randomizeType(multiMatchQuery("now", "f*", "date").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)).lenient(true) + ).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("nowHero")); } @@ -887,8 +819,7 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution builders.add(client().prepareIndex(idx).setId("2").setSource("title", "bar", "body", "foo")); indexRandom(true, false, builders); - SearchResponse searchResponse = client().prepareSearch(idx) - .setExplain(true) + SearchResponse searchResponse = prepareSearch(idx).setExplain(true) .setQuery(multiMatchQuery("foo").field("title", 100).field("body").fuzziness(Fuzziness.ZERO)) .get(); SearchHit[] hits = searchResponse.getHits().getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index 8b83c9d18a850..882e18eb593aa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -49,15 +49,15 @@ public void testBasicAllQuery() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); + SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get(); assertHitCount(resp, 2L); assertHits(resp.getHits(), "1", "3"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("bar")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("bar")).get(); assertHitCount(resp, 2L); assertHits(resp.getHits(), "1", "3"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("Bar")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("Bar")).get(); assertHitCount(resp, 3L); assertHits(resp.getHits(), "1", "2", "3"); } @@ -68,19 +68,19 @@ public void testWithDate() throws Exception { reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); + SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get(); assertHits(resp.getHits(), "1"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("bar \"2015/09/02\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); } @@ -95,19 +95,19 @@ public void testWithLotsOfTypes() throws Exception { ); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); + SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo bar")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")).get(); assertHits(resp.getHits(), "1"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); } @@ -118,31 +118,31 @@ public void testDocWithAllTypes() throws Exception { reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); - SearchResponse resp = 
client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); + SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("Bar")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("Bar")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("Baz")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("Baz")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("19")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("19")).get(); assertHits(resp.getHits(), "1"); // nested doesn't match because it's hidden - resp = client().prepareSearch("test").setQuery(queryStringQuery("1476383971")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("1476383971")).get(); assertHits(resp.getHits(), "1"); // bool doesn't match - resp = client().prepareSearch("test").setQuery(queryStringQuery("7")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("7")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("23")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("23")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("1293")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("1293")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("42")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("42")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("1.7")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("1.7")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("1.5")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("1.5")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("127.0.0.1")).get(); assertHits(resp.getHits(), "1"); // binary doesn't match // suggest doesn't match @@ -156,15 +156,15 @@ public void testKeywordWithWhitespace() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(queryStringQuery("foo")).get(); + SearchResponse resp = prepareSearch("test").setQuery(queryStringQuery("foo")).get(); assertHits(resp.getHits(), "3"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("bar")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("bar")).get(); assertHits(resp.getHits(), "2", "3"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(queryStringQuery("Foo Bar")).get(); + resp = prepareSearch("test").setQuery(queryStringQuery("Foo Bar")).get(); assertHits(resp.getHits(), "1", "2", "3"); assertHitCount(resp, 3L); } @@ -180,11 +180,9 @@ public void testAllFields() throws Exception { reqs.add(client().prepareIndex("test_1").setId("1").setSource("f1", "foo", "f2", "eggplant")); indexRandom(true, false, reqs); - assertHitCount(client().prepareSearch("test_1").setQuery(queryStringQuery("foo 
eggplant").defaultOperator(Operator.AND)), 0L); + assertHitCount(prepareSearch("test_1").setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.AND)), 0L); - SearchResponse resp = client().prepareSearch("test_1") - .setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.OR)) - .get(); + SearchResponse resp = prepareSearch("test_1").setQuery(queryStringQuery("foo eggplant").defaultOperator(Operator.OR)).get(); assertHits(resp.getHits(), "1"); assertHitCount(resp, 1L); } @@ -195,11 +193,11 @@ public void testPhraseQueryOnFieldWithNoPositions() throws Exception { reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "foo bar", "f4", "chicken parmesan")); indexRandom(true, false, reqs); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("\"eggplant parmesan\"").lenient(true)), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("\"eggplant parmesan\"").lenient(true)), 0L); Exception exc = expectThrows( Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("f4:\"eggplant parmesan\"").lenient(false)).get() + () -> prepareSearch("test").setQuery(queryStringQuery("f4:\"eggplant parmesan\"").lenient(false)).get() ); IllegalStateException ise = (IllegalStateException) ExceptionsHelper.unwrap(exc, IllegalStateException.class); assertNotNull(ise); @@ -207,10 +205,7 @@ public void testPhraseQueryOnFieldWithNoPositions() throws Exception { } public void testBooleanStrictQuery() throws Exception { - Exception e = expectThrows( - Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("foo").field("f_bool")).get() - ); + Exception e = expectThrows(Exception.class, () -> prepareSearch("test").setQuery(queryStringQuery("foo").field("f_bool")).get()); assertThat( ExceptionsHelper.unwrap(e, IllegalArgumentException.class).getMessage(), containsString("Can't parse boolean value [foo], expected [true] or [false]") @@ -220,7 +215,7 @@ public void testBooleanStrictQuery() throws Exception { public void testAllFieldsWithSpecifiedLeniency() throws IOException { Exception e = expectThrows( Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("f_date:[now-2D TO now]").lenient(false)).get() + () -> prepareSearch("test").setQuery(queryStringQuery("f_date:[now-2D TO now]").lenient(false)).get() ); assertThat(e.getCause().getMessage(), containsString("unit [D] not supported for date math [-2D]")); } @@ -232,7 +227,7 @@ public void testFieldAlias() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")).get(); + SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("value").field("f3_alias")).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -246,7 +241,7 @@ public void testFieldAliasWithEmbeddedFieldNames() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")).get(); + SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("f3_alias:value AND f2:three")).get(); assertNoFailures(response); assertHitCount(response, 1); @@ -260,7 +255,7 @@ public void 
testFieldAliasWithWildcardField() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")).get(); + SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("value").field("f3_*")).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -274,7 +269,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { // The wildcard field matches aliases for both a text and geo_point field. // By default, the geo_point field should be ignored when building the query. - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get(); + SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get(); assertNoFailures(response); assertHitCount(response, 1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java index 40b8000b30b7e..c9c7c2a56eea9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ScriptScoreQueryIT.java @@ -73,7 +73,7 @@ public void testScriptScore() { Map<String, Object> params = new HashMap<>(); params.put("param1", 0.1); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", params); - SearchResponse resp = client().prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)).get(); + SearchResponse resp = prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)).get(); assertNoFailures(resp); assertOrderedSearchHits(resp, "10", "8", "6", "4", "2"); assertFirstHit(resp, hasScore(1.0f)); @@ -81,9 +81,7 @@ public void testScriptScore() { assertThirdHit(resp, hasScore(0.6f)); // applying min score - resp = client().prepareSearch("test-index") - .setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script).setMinScore(0.6f)) - .get(); + resp = prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script).setMinScore(0.6f)).get(); assertNoFailures(resp); assertOrderedSearchHits(resp, "10", "8", "6"); } @@ -100,7 +98,7 @@ public void testScriptScoreBoolQuery() { params.put("param1", 0.1); Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", params); QueryBuilder boolQuery = boolQuery().should(matchQuery("field1", "text1")).should(matchQuery("field1", "text10")); - SearchResponse resp = client().prepareSearch("test-index").setQuery(scriptScoreQuery(boolQuery, script)).get(); + SearchResponse resp = prepareSearch("test-index").setQuery(scriptScoreQuery(boolQuery, script)).get(); assertNoFailures(resp); assertOrderedSearchHits(resp, "10", "1"); assertFirstHit(resp, hasScore(1.0f)); @@ -120,7 +118,7 @@ public void testRewrittenQuery() { RangeQueryBuilder rangeQB = new RangeQueryBuilder("field1").from("2019-01-01"); // the query should be rewritten to from:null Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", Map.of("param1", 0.1)); - SearchResponse resp = client().prepareSearch("test-index2").setQuery(scriptScoreQuery(rangeQB, script)).get(); +
SearchResponse resp = prepareSearch("test-index2").setQuery(scriptScoreQuery(rangeQB, script)).get(); assertNoFailures(resp); assertOrderedSearchHits(resp, "3", "2", "1"); } @@ -136,14 +134,14 @@ public void testDisallowExpensiveQueries() { // Execute with search.allow_expensive_queries = null => default value = true => success Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['field2'].value * param1", Map.of("param1", 0.1)); - assertNoFailures(client().prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script))); + assertNoFailures(prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script))); // Set search.allow_expensive_queries to "false" => assert failure updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", false)); ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)).get() + () -> prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script)).get() ); assertEquals( "[script score] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", @@ -152,7 +150,7 @@ public void testDisallowExpensiveQueries() { // Set search.allow_expensive_queries to "true" => success updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); - assertNoFailures(client().prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script))); + assertNoFailures(prepareSearch("test-index").setQuery(scriptScoreQuery(matchQuery("field1", "text0"), script))); } finally { updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 18a5ad78995da..918746021f381 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -104,9 +104,10 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasScore; @@ -145,8 +146,8 @@ public void testEmptyQueryString() throws ExecutionException, InterruptedExcepti client().prepareIndex("test").setId("3").setSource("field1", "quick") ); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("quick")), 3L); - 
assertHitCount(client().prepareSearch().setQuery(queryStringQuery("")), 0L); // return no docs + assertHitCount(prepareSearch().setQuery(queryStringQuery("quick")), 3L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("")), 0L); // return no docs } // see https://github.com/elastic/elasticsearch/issues/3177 @@ -160,8 +161,7 @@ public void testIssue3177() { forceMerge(); refresh(); assertHitCount( - client().prepareSearch() - .setQuery(matchAllQuery()) + prepareSearch().setQuery(matchAllQuery()) .setPostFilter( boolQuery().must(matchAllQuery()) .must(boolQuery().mustNot(boolQuery().must(termQuery("field1", "value1")).must(termQuery("field1", "value2")))) @@ -169,20 +169,16 @@ public void testIssue3177() { 3L ); assertHitCount( - client().prepareSearch() - .setQuery( - boolQuery().must( - boolQuery().should(termQuery("field1", "value1")) - .should(termQuery("field1", "value2")) - .should(termQuery("field1", "value3")) - ).filter(boolQuery().mustNot(boolQuery().must(termQuery("field1", "value1")).must(termQuery("field1", "value2")))) - ), + prepareSearch().setQuery( + boolQuery().must( + boolQuery().should(termQuery("field1", "value1")) + .should(termQuery("field1", "value2")) + .should(termQuery("field1", "value3")) + ).filter(boolQuery().mustNot(boolQuery().must(termQuery("field1", "value1")).must(termQuery("field1", "value2")))) + ), 3L ); - assertHitCount( - client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(boolQuery().mustNot(termQuery("field1", "value3"))), - 2L - ); + assertHitCount(prepareSearch().setQuery(matchAllQuery()).setPostFilter(boolQuery().mustNot(termQuery("field1", "value3"))), 2L); } public void testIndexOptions() throws Exception { @@ -193,10 +189,10 @@ public void testIndexOptions() throws Exception { client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); - assertHitCount(client().prepareSearch().setQuery(matchPhraseQuery("field2", "quick brown").slop(0)), 1L); + assertHitCount(prepareSearch().setQuery(matchPhraseQuery("field2", "quick brown").slop(0)), 1L); assertFailures( - client().prepareSearch().setQuery(matchPhraseQuery("field1", "quick brown").slop(0)), + prepareSearch().setQuery(matchPhraseQuery("field1", "quick brown").slop(0)), RestStatus.BAD_REQUEST, containsString("field:[field1] was indexed without position data; cannot run PhraseQuery") ); @@ -212,36 +208,30 @@ public void testConstantScoreQuery() throws Exception { client().prepareIndex("test").setId("2").setSource("field1", "quick lazy huge brown fox", "field2", "quick lazy huge brown fox") ); - SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get(); + SearchResponse searchResponse = prepareSearch().setQuery(constantScoreQuery(matchQuery("field1", "quick"))).get(); assertHitCount(searchResponse, 2L); for (SearchHit searchHit : searchResponse.getHits().getHits()) { assertThat(searchHit, hasScore(1.0f)); } - searchResponse = client().prepareSearch("test") - .setQuery( - boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())) - ) - .get(); + searchResponse = prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())) + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore())); - 
client().prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())).get(); + prepareSearch("test").setQuery(constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + random().nextFloat())).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore())); - searchResponse = client().prepareSearch("test") - .setQuery( - constantScoreQuery( - boolQuery().must(matchAllQuery()) - .must( - constantScoreQuery(matchQuery("field1", "quick")).boost( - 1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat()) - ) - ) - ) + searchResponse = prepareSearch("test").setQuery( + constantScoreQuery( + boolQuery().must(matchAllQuery()) + .must( + constantScoreQuery(matchQuery("field1", "quick")).boost(1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat())) + ) ) - .get(); + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasScore(searchResponse.getHits().getAt(1).getScore())); for (SearchHit searchHit : searchResponse.getHits().getHits()) { @@ -259,19 +249,16 @@ public void testConstantScoreQuery() throws Exception { int queryRounds = scaledRandomIntBetween(10, 20); for (int i = 0; i < queryRounds; i++) { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); - searchResponse = client().prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num).get(); + searchResponse = prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num).get(); long totalHits = searchResponse.getHits().getTotalHits().value; SearchHits hits = searchResponse.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); } - searchResponse = client().prepareSearch("test_1") - .setQuery( - boolQuery().must(matchAllQuery()) - .must(constantScoreQuery(matchQuery).boost(1.0f + (random.nextBoolean() ? 0.0f : random.nextFloat()))) - ) - .setSize(num) - .get(); + searchResponse = prepareSearch("test_1").setQuery( + boolQuery().must(matchAllQuery()) + .must(constantScoreQuery(matchQuery).boost(1.0f + (random.nextBoolean() ? 
0.0f : random.nextFloat()))) + ).setSize(num).get(); hits = searchResponse.getHits(); assertThat(hits.getTotalHits().value, equalTo(totalHits)); if (totalHits > 1) { @@ -294,11 +281,11 @@ public void testAllDocsQueryString() throws InterruptedException, ExecutionExcep int iters = scaledRandomIntBetween(100, 200); for (int i = 0; i < iters; i++) { - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("*:*^10.0").boost(10.0f)), 2L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("*:*^10.0").boost(10.0f)), 2L); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchAllQuery()))) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + boolQuery().must(matchAllQuery()).must(constantScoreQuery(matchAllQuery())) + ).get(); assertHitCount(searchResponse, 2L); assertThat((double) searchResponse.getHits().getAt(0).getScore(), closeTo(2.0, 0.1)); assertThat((double) searchResponse.getHits().getAt(1).getScore(), closeTo(2.0, 0.1)); @@ -311,11 +298,11 @@ public void testQueryStringAnalyzedWildcard() throws Exception { client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("value*")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("*ue*")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("*ue_1")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("val*e_1")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("v?l*e?1")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("value*")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("*ue*")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("*ue_1")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("val*e_1")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("v?l*e?1")), 1L); } public void testLowercaseExpandedTerms() { @@ -324,10 +311,10 @@ public void testLowercaseExpandedTerms() { client().prepareIndex("test").setId("1").setSource("field1", "value_1", "field2", "value_2").get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("VALUE_3~1")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("ValUE_*")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("vAl*E_1")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("VALUE_3~1")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("ValUE_*")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("vAl*E_1")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("[VALUE_1 TO VALUE_3]")), 1L); } // Issue #3540 @@ -342,12 +329,12 @@ public void testDateRangeInQueryString() { client().prepareIndex("test").setId("1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("future:[now/d TO now+2M/d]")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("past:[now-2M/d TO now/d]")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("future:[now/d 
TO now+2M/d]")), 1L); SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch().setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lenient(false)).get() + () -> prepareSearch().setQuery(queryStringQuery("future:[now/D TO now+2M/d]").lenient(false)).get() ); assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(e.toString(), containsString("unit [D] not supported for date math")); @@ -365,7 +352,7 @@ public void testDateRangeInQueryStringWithTimeZone_7880() { client().prepareIndex("test").setId("1").setSource("past", now).get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("past:[now-1m/m TO now+1m/m]").timeZone(timeZone.getId())), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("past:[now-1m/m TO now+1m/m]").timeZone(timeZone.getId())), 1L); } // Issue #10477 @@ -379,27 +366,20 @@ public void testDateRangeInQueryStringWithTimeZone_10477() { refresh(); // Timezone set with dates - assertHitCount( - client().prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00+0200 TO 2015-04-06T23:00:00+0200]")), - 2L - ); + assertHitCount(prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00+0200 TO 2015-04-06T23:00:00+0200]")), 2L); // Same timezone set with time_zone assertHitCount( - client().prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00 TO 2015-04-06T23:00:00]").timeZone("+0200")), + prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00 TO 2015-04-06T23:00:00]").timeZone("+0200")), 2L ); // We set a timezone which will give no result - assertHitCount( - client().prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]")), - 0L - ); + assertHitCount(prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]")), 0L); // Same timezone set with time_zone but another timezone is set directly within dates which has the precedence assertHitCount( - client().prepareSearch() - .setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]").timeZone("+0200")), + prepareSearch().setQuery(queryStringQuery("past:[2015-04-06T00:00:00-0200 TO 2015-04-06T23:00:00-0200]").timeZone("+0200")), 0L ); } @@ -414,20 +394,13 @@ public void testIdsQueryTestsIdIndexed() throws Exception { client().prepareIndex("test").setId("3").setSource("field1", "value3") ); - SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))), "1", "3"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(idsQuery().addIds("1", "3")), "1", "3"); - assertHitCount(client().prepareSearch().setQuery(idsQuery().addIds("7", "10")), 0L); + assertHitCount(prepareSearch().setQuery(idsQuery().addIds("7", "10")), 0L); // repeat..., with terms - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); + 
assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))), "1", "3"); } public void testTermIndexQuery() throws Exception { @@ -440,27 +413,16 @@ public void testTermIndexQuery() throws Exception { } for (String indexName : indexNames) { - SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termQuery("_index", indexName))).get(); - SearchResponse searchResponse = assertSearchResponse(request); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, indexName + "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(termQuery("_index", indexName))), indexName + "1"); } for (String indexName : indexNames) { - SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexName))).get(); - SearchResponse searchResponse = assertSearchResponse(request); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, indexName + "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexName))), indexName + "1"); } for (String indexName : indexNames) { - SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(matchQuery("_index", indexName))).get(); - SearchResponse searchResponse = assertSearchResponse(request); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, indexName + "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(matchQuery("_index", indexName))), indexName + "1"); } { - SearchResponse request = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexNames))).get(); - SearchResponse searchResponse = assertSearchResponse(request); - assertHitCount(searchResponse, indexNames.length); + assertHitCountAndNoFailures(prepareSearch().setQuery(constantScoreQuery(termsQuery("_index", indexNames))), indexNames.length); } } @@ -516,35 +478,15 @@ public void testFilterExistsMissing() throws Exception { ) ); - SearchResponse searchResponse = client().prepareSearch().setQuery(existsQuery("field1")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - - searchResponse = client().prepareSearch().setQuery(constantScoreQuery(existsQuery("field1"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - - searchResponse = client().prepareSearch().setQuery(queryStringQuery("_exists_:field1")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - - searchResponse = client().prepareSearch().setQuery(existsQuery("field2")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - searchResponse = client().prepareSearch().setQuery(existsQuery("field3")).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("4")); - + assertSearchHitsWithoutFailures(prepareSearch().setQuery(existsQuery("field1")), "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(constantScoreQuery(existsQuery("field1"))), "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(queryStringQuery("_exists_:field1")), "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(existsQuery("field2")), "1", "3"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(existsQuery("field3")), "4"); // wildcard check - searchResponse = client().prepareSearch().setQuery(existsQuery("x*")).get(); - 
assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - + assertSearchHitsWithoutFailures(prepareSearch().setQuery(existsQuery("x*")), "1", "2"); // object check - searchResponse = client().prepareSearch().setQuery(existsQuery("obj1")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(existsQuery("obj1")), "1", "2"); } public void testPassQueryOrFilterAsJSONString() throws Exception { @@ -553,13 +495,13 @@ public void testPassQueryOrFilterAsJSONString() throws Exception { client().prepareIndex("test").setId("1").setSource("field1", "value1_1", "field2", "value2_1").setRefreshPolicy(IMMEDIATE).get(); WrapperQueryBuilder wrapper = new WrapperQueryBuilder("{ \"term\" : { \"field1\" : \"value1_1\" } }"); - assertHitCount(client().prepareSearch().setQuery(wrapper), 1L); + assertHitCount(prepareSearch().setQuery(wrapper), 1L); BoolQueryBuilder bool = boolQuery().must(wrapper).must(new TermQueryBuilder("field2", "value2_1")); - assertHitCount(client().prepareSearch().setQuery(bool), 1L); + assertHitCount(prepareSearch().setQuery(bool), 1L); WrapperQueryBuilder wrapperFilter = wrapperQuery("{ \"term\" : { \"field1\" : \"value1_1\" } }"); - assertHitCount(client().prepareSearch().setPostFilter(wrapperFilter), 1L); + assertHitCount(prepareSearch().setPostFilter(wrapperFilter), 1L); } public void testFiltersWithCustomCacheKey() throws Exception { @@ -567,10 +509,10 @@ public void testFiltersWithCustomCacheKey() throws Exception { client().prepareIndex("test").setId("1").setSource("field1", "value1").get(); refresh(); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("field1", "value1"))), 1L); } public void testMatchQueryNumeric() throws Exception { @@ -583,14 +525,14 @@ public void testMatchQueryNumeric() throws Exception { client().prepareIndex("test").setId("3").setSource("long", 3L, "double", 3.0d) ); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("long", "1")).get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchQuery("long", "1")).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch().setQuery(matchQuery("double", "2")).get(); + searchResponse = prepareSearch().setQuery(matchQuery("double", "2")).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - expectThrows(SearchPhaseExecutionException.class, () -> client().prepareSearch().setQuery(matchQuery("double", "2 3 4")).get()); + expectThrows(SearchPhaseExecutionException.class, () -> prepareSearch().setQuery(matchQuery("double", "2 3 4")).get()); } public void 
testMatchQueryFuzzy() throws Exception { @@ -601,23 +543,20 @@ public void testMatchQueryFuzzy() throws Exception { client().prepareIndex("test").setId("1").setSource("text", "Unit"), client().prepareIndex("test").setId("2").setSource("text", "Unity") ); - assertHitCount(client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromEdits(0))), 0L); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromEdits(1))) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - - searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromString("AUTO"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); + assertHitCount(prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromEdits(0))), 0L); - assertHitCount(client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromString("AUTO:5,7"))), 0L); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromEdits(1))), "1", "2"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromString("AUTO"))), + "1", + "2" + ); - searchResponse = client().prepareSearch().setQuery(matchQuery("text", "unify").fuzziness(Fuzziness.fromString("AUTO:5,7"))).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "2"); + assertHitCount(prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.fromString("AUTO:5,7"))), 0L); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(matchQuery("text", "unify").fuzziness(Fuzziness.fromString("AUTO:5,7"))), + "2" + ); } public void testMultiMatchQuery() throws Exception { @@ -631,40 +570,31 @@ public void testMultiMatchQuery() throws Exception { ); MultiMatchQueryBuilder builder = multiMatchQuery("value1 value2 value4", "field1", "field2"); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(builder) - .addAggregation(AggregationBuilders.terms("field1").field("field1.keyword")) - .get(); - - assertHitCount(searchResponse, 2L); // this uses dismax so scores are equal and the order can be arbitrary - assertSearchHits(searchResponse, "1", "2"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(builder).addAggregation(AggregationBuilders.terms("field1").field("field1.keyword")), + "1", + "2" + ); - searchResponse = client().prepareSearch().setQuery(builder).get(); - - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "1", "2"); indicesAdmin().prepareRefresh("test").get(); builder = multiMatchQuery("value1", "field1", "field2").operator(Operator.AND); // Operator only applies on terms inside a field! // Fields are always OR-ed together. - searchResponse = client().prepareSearch().setQuery(builder).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("1")); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "1"); refresh(); builder = multiMatchQuery("value1", "field1").field("field3", 1.5f).operator(Operator.AND); // Operator only applies on terms inside // a field! Fields are always OR-ed // together. 
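
(Aside, not part of the diff.) The trailing comments in the hunk above — "Operator only applies on terms inside a field! Fields are always OR-ed together." — describe how `multi_match` distributes the operator. A minimal standalone sketch of that behavior, assuming the standard `QueryBuilders` API used throughout this test file (the class name and field names here are illustrative only):

```java
// Sketch only: illustrates the comment in the hunk above. Assumes the
// org.elasticsearch:elasticsearch dependency; "field1"/"field2" mirror the
// hypothetical fields used by the test.
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.Operator;

import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;

public class MultiMatchOperatorDemo {
    public static void main(String[] args) {
        // AND binds the two terms *within* each field; the per-field clauses
        // are still combined disjunctively, so a doc matches if field1 contains
        // both terms OR field2 contains both terms.
        MultiMatchQueryBuilder builder = multiMatchQuery("value1 value2", "field1", "field2")
            .operator(Operator.AND);
        System.out.println(builder); // toString() renders the query DSL JSON
    }
}
```
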
- searchResponse = client().prepareSearch().setQuery(builder).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "3", "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "3", "1"); indicesAdmin().prepareRefresh("test").get(); builder = multiMatchQuery("value1").field("field1").field("field3", 1.5f).operator(Operator.AND); // Operator only applies on terms // inside a field! Fields are // always OR-ed together. - searchResponse = client().prepareSearch().setQuery(builder).get(); + SearchResponse searchResponse = prepareSearch().setQuery(builder).get(); assertHitCount(searchResponse, 2L); assertSearchHits(searchResponse, "3", "1"); @@ -679,7 +609,7 @@ public void testMultiMatchQuery() throws Exception { Matcher reasonMatcher = containsString("NumberFormatException: For input string: \"value1\""); ShardSearchFailure[] shardFailures; try { - client().prepareSearch().setQuery(builder).get(); + prepareSearch().setQuery(builder).get(); shardFailures = searchResponse.getShardFailures(); assertThat("Expected shard failures, got none", shardFailures, not(emptyArray())); } catch (SearchPhaseExecutionException e) { @@ -693,9 +623,7 @@ public void testMultiMatchQuery() throws Exception { } builder.lenient(true); - searchResponse = client().prepareSearch().setQuery(builder).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("1")); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(builder), "1"); } public void testMatchQueryZeroTermsQuery() { @@ -706,14 +634,14 @@ public void testMatchQueryZeroTermsQuery() { BoolQueryBuilder boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(ZeroTermsQueryOption.NONE)) .must(matchQuery("field1", "value1").zeroTermsQuery(ZeroTermsQueryOption.NONE)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 0L); + assertHitCount(prepareSearch().setQuery(boolQuery), 0L); boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(ZeroTermsQueryOption.ALL)) .must(matchQuery("field1", "value1").zeroTermsQuery(ZeroTermsQueryOption.ALL)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery), 1L); boolQuery = boolQuery().must(matchQuery("field1", "a").zeroTermsQuery(ZeroTermsQueryOption.ALL)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 2L); + assertHitCount(prepareSearch().setQuery(boolQuery), 2L); } public void testMultiMatchQueryZeroTermsQuery() { @@ -725,14 +653,14 @@ public void testMultiMatchQueryZeroTermsQuery() { BoolQueryBuilder boolQuery = boolQuery().must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(ZeroTermsQueryOption.NONE)) // Fields are ORed together .must(multiMatchQuery("value1", "field1", "field2").zeroTermsQuery(ZeroTermsQueryOption.NONE)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 0L); + assertHitCount(prepareSearch().setQuery(boolQuery), 0L); boolQuery = boolQuery().must(multiMatchQuery("a", "field1", "field2").zeroTermsQuery(ZeroTermsQueryOption.ALL)) .must(multiMatchQuery("value4", "field1", "field2").zeroTermsQuery(ZeroTermsQueryOption.ALL)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery), 1L); boolQuery = boolQuery().must(multiMatchQuery("a", "field1").zeroTermsQuery(ZeroTermsQueryOption.ALL)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 2L); + assertHitCount(prepareSearch().setQuery(boolQuery), 2L); } public void 
testMultiMatchQueryMinShouldMatch() { @@ -744,39 +672,39 @@ public void testMultiMatchQueryMinShouldMatch() { MultiMatchQueryBuilder multiMatchQuery = multiMatchQuery("value1 value2 foo", "field1", "field2"); multiMatchQuery.minimumShouldMatch("70%"); - SearchResponse searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + SearchResponse searchResponse = prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); multiMatchQuery.minimumShouldMatch("30%"); - searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + searchResponse = prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); multiMatchQuery.minimumShouldMatch("70%"); - searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + searchResponse = prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); multiMatchQuery.minimumShouldMatch("30%"); - searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + searchResponse = prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); multiMatchQuery = multiMatchQuery("value1 value2 bar", "field1"); multiMatchQuery.minimumShouldMatch("100%"); - assertHitCount(client().prepareSearch().setQuery(multiMatchQuery), 0L); + assertHitCount(prepareSearch().setQuery(multiMatchQuery), 0L); multiMatchQuery.minimumShouldMatch("70%"); - searchResponse = client().prepareSearch().setQuery(multiMatchQuery).get(); + searchResponse = prepareSearch().setQuery(multiMatchQuery).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); // Min should match > # optional clauses returns no docs. multiMatchQuery = multiMatchQuery("value1 value2 value3", "field1", "field2"); multiMatchQuery.minimumShouldMatch("4"); - assertHitCount(client().prepareSearch().setQuery(multiMatchQuery), 0L); + assertHitCount(prepareSearch().setQuery(multiMatchQuery), 0L); } public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws IOException { @@ -787,7 +715,7 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws BoolQueryBuilder boolQuery = boolQuery().must(termQuery("field1", "value1")) .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)); - SearchResponse searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + SearchResponse searchResponse = prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); @@ -795,18 +723,18 @@ public void testBoolQueryMinShouldMatchBiggerThanNumberOfShouldClauses() throws .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(1)) // Only one should clause is defined, returns no docs. 
.minimumShouldMatch(2); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 0L); + assertHitCount(prepareSearch().setQuery(boolQuery), 0L); boolQuery = boolQuery().should(termQuery("field1", "value1")) .should(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)) .minimumShouldMatch(1); - searchResponse = client().prepareSearch().setQuery(boolQuery).get(); + searchResponse = prepareSearch().setQuery(boolQuery).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); boolQuery = boolQuery().must(termQuery("field1", "value1")) .must(boolQuery().should(termQuery("field1", "value1")).should(termQuery("field1", "value2")).minimumShouldMatch(3)); - assertHitCount(client().prepareSearch().setQuery(boolQuery), 0L); + assertHitCount(prepareSearch().setQuery(boolQuery), 0L); } public void testFuzzyQueryString() { @@ -815,7 +743,7 @@ public void testFuzzyQueryString() { client().prepareIndex("test").setId("2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("str:kimcy~1")).get(); + SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("str:kimcy~1")).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); @@ -836,9 +764,9 @@ public void testQuotedQueryStringWithBoost() throws InterruptedException { client().prepareIndex("test").setId("2").setSource("important", "nothing important", "less_important", "phrase match") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(queryStringQuery("\"phrase match\"").field("important", boost).field("less_important")) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + queryStringQuery("\"phrase match\"").field("important", boost).field("less_important") + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); @@ -854,20 +782,20 @@ public void testSpecialRangeSyntaxInQueryString() { client().prepareIndex("test").setId("2").setSource("str", "shay", "date", "2012-02-05", "num", 20).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>19")).get(); + SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("num:>19")).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("num:>20")), 0L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("num:>20")), 0L); - searchResponse = client().prepareSearch().setQuery(queryStringQuery("num:>=20")).get(); + searchResponse = prepareSearch().setQuery(queryStringQuery("num:>=20")).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("2")); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("num:>11")), 2L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("num:<20")), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("num:<=20")), 2L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("+num:>11 +num:<20")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("num:>11")), 2L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("num:<20")), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("num:<=20")), 2L); + 
assertHitCount(prepareSearch().setQuery(queryStringQuery("+num:>11 +num:<20")), 1L); } public void testEmptytermsQuery() throws Exception { @@ -880,8 +808,8 @@ public void testEmptytermsQuery() throws Exception { client().prepareIndex("test").setId("3").setSource("term", "3"), client().prepareIndex("test").setId("4").setSource("term", "4") ); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("term", new String[0]))), 0L); - assertHitCount(client().prepareSearch("test").setQuery(idsQuery()), 0L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("term", new String[0]))), 0L); + assertHitCount(prepareSearch("test").setQuery(idsQuery()), 0L); } public void testTermsQuery() throws Exception { @@ -894,48 +822,35 @@ public void testTermsQuery() throws Exception { client().prepareIndex("test").setId("3").setSource("str", "3", "lng", 3L, "dbl", 3.0d), client().prepareIndex("test").setId("4").setSource("str", "4", "lng", 4L, "dbl", 4.0d) ); - - SearchResponse searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "1", "4"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "4"); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 2, 3 }))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "2", "3"); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 2, 3 }))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "2", "3"); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new int[] { 1, 3 }))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new float[] { 2, 4 }))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "2", "4"); - + assertSearchHitsWithoutFailures(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "1", "4"))), "1", "4"); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 2, 3 }))), + "2", + "3" + ); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 2, 3 }))), + "2", + "3" + ); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new int[] { 1, 3 }))), + "1", + "3" + ); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new float[] { 2, 4 }))), + "2", + "4" + ); // test partial matching - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "2", "5"))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 2, 5 }))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 2, 5 }))).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); - 
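
(Aside, not part of the diff.) The `testTermsQuery` refactor above exercises the `termsQuery` overloads for `String...`, `long[]`, `double[]`, `int[]`, and `float[]`, plus "partial matching": a terms query is a disjunction, so a single matching term is enough. A small sketch under the same API assumptions (class name is mine):

```java
// Sketch only: the terms query matches when ANY listed term matches, and the
// numeric overloads are all normalized against the mapped field type.
import org.elasticsearch.index.query.TermsQueryBuilder;

import static org.elasticsearch.index.query.QueryBuilders.termsQuery;

public class TermsQueryOverloadsDemo {
    public static void main(String[] args) {
        TermsQueryBuilder strings = termsQuery("str", "2", "5");            // "5" has no doc: still matches doc "2"
        TermsQueryBuilder longs   = termsQuery("lng", new long[] { 2, 5 }); // same partial-match behavior
        TermsQueryBuilder mixed   = termsQuery("lng", new int[] { 1, 3 });  // ints against a long field are fine
        System.out.println(strings);
        System.out.println(longs);
        System.out.println(mixed);
    }
}
```
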
+ assertSearchHitsWithoutFailures(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "2", "5"))), "2"); + assertSearchHitsWithoutFailures(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 2, 5 }))), "2"); + assertSearchHitsWithoutFailures(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 2, 5 }))), "2"); // test valid type, but no matching terms - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "5", "6"))), 0L); - - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 5, 6 }))), 0L); - assertHitCount(client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 5, 6 }))), 0L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("str", "5", "6"))), 0L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("dbl", new double[] { 5, 6 }))), 0L); + assertHitCount(prepareSearch("test").setQuery(constantScoreQuery(termsQuery("lng", new long[] { 5, 6 }))), 0L); } public void testTermsLookupFilter() throws Exception { @@ -1014,64 +929,63 @@ public void testTermsLookupFilter() throws Exception { client().prepareIndex("test").setId("3").setSource("term", "3"), client().prepareIndex("test").setId("4").setSource("term", "4") ); - - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))), + "1", + "3" + ); // same as above, just on the _id... - searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("_id", new TermsLookup("lookup", "1", "terms"))), + "1", + "3" + ); // another search with same parameters... 
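
(Aside, not part of the diff.) On the lookup mechanics exercised in `testTermsLookupFilter` above: a terms-lookup query fetches the term list from another document at search time rather than inlining it. A hedged sketch of building that query, reusing the test's own hypothetical `lookup` index, id, and path:

```java
// Sketch only: at search time ES fetches doc "1" from index "lookup", reads
// the "terms" array from its _source, and rewrites this into a plain terms
// query on the "term" field.
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.indices.TermsLookup;

import static org.elasticsearch.index.query.QueryBuilders.termsLookupQuery;

public class TermsLookupDemo {
    public static void main(String[] args) {
        TermsQueryBuilder lookup = termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"));
        System.out.println(lookup); // renders the {"terms": {"term": {"index": ...}}} JSON
    }
}
```
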
- searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); - - searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "1", "terms"))), + "1", + "3" + ); - searchResponse = client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "2", "4"); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "2", "terms"))), + "2" + ); - assertHitCount(client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))), 0L); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "3", "terms"))), + "2", + "4" + ); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "3"); + assertHitCount(prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "4", "terms"))), 0L); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))) - .get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("2")); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "1", "arr.term"))), + "1", + "3" + ); - searchResponse = client().prepareSearch("test") - .setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "2", "4"); + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "2", "arr.term"))), + "2" + ); - assertHitCount( - client().prepareSearch("test").setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))), - 0L + assertSearchHitsWithoutFailures( + prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup2", "3", "arr.term"))), + "2", + "4" ); + assertHitCount(prepareSearch("test").setQuery(termsLookupQuery("not_exists", new TermsLookup("lookup2", "3", "arr.term"))), 0L); + // index "lookup" id "missing" document does not exist: ignore the lookup terms - assertHitCount( - client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms"))), - 0L - ); + assertHitCount(prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup", "missing", "terms"))), 0L); // index "lookup3" has the source disabled: ignore the lookup terms - assertHitCount(client().prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))), 0L); + assertHitCount(prepareSearch("test").setQuery(termsLookupQuery("term", new TermsLookup("lookup3", "1", "terms"))), 0L); } public void testBasicQueryById() throws Exception { @@ -1082,23 +996,23 @@ public void 
testBasicQueryById() throws Exception { client().prepareIndex("test").setId("3").setSource("field1", "value3").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); + SearchResponse searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get(); + searchResponse = prepareSearch().setQuery(idsQuery().addIds("1")).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); + searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2")).get(); assertHitCount(searchResponse, 2L); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get(); + searchResponse = prepareSearch().setQuery(idsQuery().addIds("1")).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get(); + searchResponse = prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get(); assertHitCount(searchResponse, 3L); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); } @@ -1139,82 +1053,82 @@ public void testNumericTermsAndRanges() throws Exception { SearchResponse searchResponse; logger.info("--> term query on 1"); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_byte", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_byte", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_short", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_short", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_integer", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_integer", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_long", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_long", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_float", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_float", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termQuery("num_double", 1)).get(); + searchResponse = prepareSearch("test").setQuery(termQuery("num_double", 1)).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); logger.info("--> terms query on 1"); - searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_byte", new int[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_byte", new int[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse 
= client().prepareSearch("test").setQuery(termsQuery("num_short", new int[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_short", new int[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_integer", new int[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_integer", new int[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_long", new int[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_long", new int[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_float", new double[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_float", new double[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(termsQuery("num_double", new double[] { 1 })).get(); + searchResponse = prepareSearch("test").setQuery(termsQuery("num_double", new double[] { 1 })).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); logger.info("--> term filter on 1"); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_byte", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_byte", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_short", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_short", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_integer", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_integer", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_long", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_long", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_float", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_float", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_double", 1))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termQuery("num_double", 1))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); logger.info("--> terms filter on 1"); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_byte", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_byte", new int[] { 1 }))).get(); 
assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_short", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_short", new int[] { 1 }))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_integer", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_integer", new int[] { 1 }))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_long", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_long", new int[] { 1 }))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_float", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_float", new int[] { 1 }))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); - searchResponse = client().prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_double", new int[] { 1 }))).get(); + searchResponse = prepareSearch("test").setQuery(constantScoreQuery(termsQuery("num_double", new int[] { 1 }))).get(); assertHitCount(searchResponse, 1L); assertFirstHit(searchResponse, hasId("1")); } @@ -1243,30 +1157,34 @@ public void testNumericRangeFilter_2826() throws Exception { client().prepareIndex("test").setId("4").setSource("field1", "test2", "num_long", 4).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setPostFilter(boolQuery().should(rangeQuery("num_long").from(1).to(2)).should(rangeQuery("num_long").from(3).to(4))) - .get(); - assertHitCount(searchResponse, 4L); + assertHitCount( + prepareSearch("test").setPostFilter( + boolQuery().should(rangeQuery("num_long").from(1).to(2)).should(rangeQuery("num_long").from(3).to(4)) + ), + 4L + ); // This made 2826 fail! (only with bit based filters) - searchResponse = client().prepareSearch("test") - .setPostFilter(boolQuery().should(rangeQuery("num_long").from(1).to(2)).should(rangeQuery("num_long").from(3).to(4))) - .get(); - assertHitCount(searchResponse, 4L); + assertHitCount( + prepareSearch("test").setPostFilter( + boolQuery().should(rangeQuery("num_long").from(1).to(2)).should(rangeQuery("num_long").from(3).to(4)) + ), + 4L + ); // This made #2979 fail! 
- searchResponse = client().prepareSearch("test") - .setPostFilter( + assertHitCount( + prepareSearch("test").setPostFilter( boolQuery().must(termQuery("field1", "test1")) .should(rangeQuery("num_long").from(1).to(2)) .should(rangeQuery("num_long").from(3).to(4)) - ) - .get(); - assertHitCount(searchResponse, 2L); + ), + 2L + ); } // see #2926 - public void testMustNot() throws IOException, ExecutionException, InterruptedException { + public void testMustNot() throws InterruptedException { assertAcked( prepareCreate("test") // issue manifested only with shards>=2 @@ -1281,17 +1199,12 @@ public void testMustNot() throws IOException, ExecutionException, InterruptedExc client().prepareIndex("test").setId("4").setSource("description", "foo") ); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .get(); - assertHitCount(searchResponse, 4L); - - searchResponse = client().prepareSearch("test") - .setQuery(boolQuery().mustNot(matchQuery("description", "anything"))) - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .get(); - assertHitCount(searchResponse, 2L); + assertHitCount(prepareSearch("test").setQuery(matchAllQuery()).setSearchType(SearchType.DFS_QUERY_THEN_FETCH), 4L); + assertHitCount( + prepareSearch("test").setQuery(boolQuery().mustNot(matchQuery("description", "anything"))) + .setSearchType(SearchType.DFS_QUERY_THEN_FETCH), + 2L + ); } public void testIntervals() throws InterruptedException { @@ -1325,7 +1238,7 @@ public void testIntervals() throws InterruptedException { } } }"""; - SearchResponse response = client().prepareSearch("test").setQuery(wrapperQuery(json)).get(); + SearchResponse response = prepareSearch("test").setQuery(wrapperQuery(json)).get(); assertHitCount(response, 1L); } @@ -1341,10 +1254,11 @@ public void testSimpleSpan() throws IOException, ExecutionException, Interrupted client().prepareIndex("test").setId("4").setSource("description", "foo") ); - assertHitCount(client().prepareSearch("test").setQuery(spanOrQuery(spanTermQuery("description", "bar"))), 1L); + assertHitCount(prepareSearch("test").setQuery(spanOrQuery(spanTermQuery("description", "bar"))), 1L); assertHitCount( - client().prepareSearch("test") - .setQuery(spanNearQuery(spanTermQuery("description", "foo"), 3).addClause(spanTermQuery("description", "other"))), + prepareSearch("test").setQuery( + spanNearQuery(spanTermQuery("description", "foo"), 3).addClause(spanTermQuery("description", "other")) + ), 3L ); } @@ -1358,27 +1272,16 @@ public void testSpanMultiTermQuery() throws IOException { client().prepareIndex("test").setId("4").setSource("description", "fop", "count", 4).get(); refresh(); + assertHitCount(prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(fuzzyQuery("description", "fop")))), 4); + assertHitCount(prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(prefixQuery("description", "fo")))), 4); + assertHitCount(prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(wildcardQuery("description", "oth*")))), 3); assertHitCount( - client().prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(fuzzyQuery("description", "fop")))), - 4 - ); - assertHitCount( - client().prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(prefixQuery("description", "fo")))), - 4 - ); - assertHitCount( - client().prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(wildcardQuery("description", "oth*")))), - 3 - ); - 
assertHitCount( - client().prepareSearch("test") - .setQuery(spanOrQuery(spanMultiTermQueryBuilder(QueryBuilders.rangeQuery("description").from("ffa").to("foo")))), - 3 - ); - assertHitCount( - client().prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(regexpQuery("description", "fo{2}")))), + prepareSearch("test").setQuery( + spanOrQuery(spanMultiTermQueryBuilder(QueryBuilders.rangeQuery("description").from("ffa").to("foo"))) + ), 3 ); + assertHitCount(prepareSearch("test").setQuery(spanOrQuery(spanMultiTermQueryBuilder(regexpQuery("description", "fo{2}")))), 3); } public void testSpanNot() throws IOException, ExecutionException, InterruptedException { @@ -1389,39 +1292,36 @@ public void testSpanNot() throws IOException, ExecutionException, InterruptedExc refresh(); assertHitCount( - client().prepareSearch("test") - .setQuery( - spanNotQuery( - spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( - QueryBuilders.spanTermQuery("description", "fox") - ), - spanTermQuery("description", "brown") - ) - ), + prepareSearch("test").setQuery( + spanNotQuery( + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( + QueryBuilders.spanTermQuery("description", "fox") + ), + spanTermQuery("description", "brown") + ) + ), 1L ); assertHitCount( - client().prepareSearch("test") - .setQuery( - spanNotQuery( - spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( - QueryBuilders.spanTermQuery("description", "fox") - ), - spanTermQuery("description", "sleeping") - ).dist(5) - ), + prepareSearch("test").setQuery( + spanNotQuery( + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( + QueryBuilders.spanTermQuery("description", "fox") + ), + spanTermQuery("description", "sleeping") + ).dist(5) + ), 1L ); assertHitCount( - client().prepareSearch("test") - .setQuery( - spanNotQuery( - spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( - QueryBuilders.spanTermQuery("description", "fox") - ), - spanTermQuery("description", "jumped") - ).pre(1).post(1) - ), + prepareSearch("test").setQuery( + spanNotQuery( + spanNearQuery(QueryBuilders.spanTermQuery("description", "quick"), 1).addClause( + QueryBuilders.spanTermQuery("description", "fox") + ), + spanTermQuery("description", "jumped") + ).pre(1).post(1) + ), 1L ); } @@ -1475,8 +1375,7 @@ public void testSimpleDFSQuery() throws IOException { refresh(); assertNoFailures( - client().prepareSearch("test") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery( boolQuery().must(termQuery("online", true)) .must( @@ -1501,20 +1400,20 @@ public void testMultiFieldQueryString() { client().prepareIndex("test").setId("1").setSource("field1", "value1", "field2", "value2").setRefreshPolicy(IMMEDIATE).get(); logger.info("regular"); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("value1").field("field1").field("field2")), 1); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:value1")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("value1").field("field1").field("field2")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("field\\*:value1")), 1); logger.info("prefix"); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("value*").field("field1").field("field2")), 1); - 
assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:value*")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("value*").field("field1").field("field2")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("field\\*:value*")), 1); logger.info("wildcard"); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("v?lue*").field("field1").field("field2")), 1); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:v?lue*")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("v?lue*").field("field1").field("field2")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("field\\*:v?lue*")), 1); logger.info("fuzzy"); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("value~").field("field1").field("field2")), 1); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:value~")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("value~").field("field1").field("field2")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("field\\*:value~")), 1); logger.info("regexp"); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("/value[01]/").field("field1").field("field2")), 1); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("field\\*:/value[01]/")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("/value[01]/").field("field1").field("field2")), 1); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("field\\*:/value[01]/")), 1); } // see #3797 @@ -1524,9 +1423,9 @@ public void testMultiMatchLenientIssue3797() { client().prepareIndex("test").setId("1").setSource("field1", 123, "field2", "value2").get(); refresh(); - assertHitCount(client().prepareSearch("test").setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)), 1L); - assertHitCount(client().prepareSearch("test").setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)), 1L); - assertHitCount(client().prepareSearch("test").setQuery(multiMatchQuery("value2").field("field2", 2).lenient(true)), 1L); + assertHitCount(prepareSearch("test").setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)), 1L); + assertHitCount(prepareSearch("test").setQuery(multiMatchQuery("value2", "field2").field("field1", 2).lenient(true)), 1L); + assertHitCount(prepareSearch("test").setQuery(multiMatchQuery("value2").field("field2", 2).lenient(true)), 1L); } public void testMinScore() throws ExecutionException, InterruptedException { @@ -1538,9 +1437,9 @@ public void testMinScore() throws ExecutionException, InterruptedException { client().prepareIndex("test").setId("4").setSource("score", 0.5).get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(functionScoreQuery(ScoreFunctionBuilders.fieldValueFactorFunction("score").missing(1.0)).setMinScore(1.5f)) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + functionScoreQuery(ScoreFunctionBuilders.fieldValueFactorFunction("score").missing(1.0)).setMinScore(1.5f) + ).get(); assertHitCount(searchResponse, 2); assertFirstHit(searchResponse, hasId("3")); assertSecondHit(searchResponse, hasId("1")); @@ -1553,28 +1452,24 @@ public void testQueryStringWithSlopAndFields() { client().prepareIndex("test").setId("2").setSource("desc", "one two three", "type", "product").get(); refresh(); - 
assertHitCount(client().prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("\"one two\"").defaultField("desc")), 2); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("\"one two\"").defaultField("desc")), 2); assertHitCount( - client().prepareSearch("test") - .setPostFilter(QueryBuilders.termQuery("type", "customer")) + prepareSearch("test").setPostFilter(QueryBuilders.termQuery("type", "customer")) .setQuery(QueryBuilders.queryStringQuery("\"one two\"").field("desc")), 1 ); assertHitCount( - client().prepareSearch("test") - .setPostFilter(QueryBuilders.termQuery("type", "product")) + prepareSearch("test").setPostFilter(QueryBuilders.termQuery("type", "product")) .setQuery(QueryBuilders.queryStringQuery("\"one three\"~5").field("desc")), 1 ); assertHitCount( - client().prepareSearch("test") - .setPostFilter(QueryBuilders.termQuery("type", "customer")) + prepareSearch("test").setPostFilter(QueryBuilders.termQuery("type", "customer")) .setQuery(QueryBuilders.queryStringQuery("\"one two\"").defaultField("desc")), 1 ); assertHitCount( - client().prepareSearch("test") - .setPostFilter(QueryBuilders.termQuery("type", "customer")) + prepareSearch("test").setPostFilter(QueryBuilders.termQuery("type", "customer")) .setQuery(QueryBuilders.queryStringQuery("\"one two\"").defaultField("desc")), 1 ); @@ -1593,8 +1488,8 @@ public void testDateProvidedAsNumber() throws InterruptedException { client().prepareIndex("test").setId("6").setSource("field", 999999999999L) ); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(1000000000000L)), 4); - assertHitCount(client().prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(999999999999L)), 6); + assertHitCount(prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(1000000000000L)), 4); + assertHitCount(prepareSearch("test").setSize(0).setQuery(rangeQuery("field").gte(999999999999L)), 6); } public void testRangeQueryWithTimeZone() throws Exception { @@ -1611,65 +1506,63 @@ public void testRangeQueryWithTimeZone() throws Exception { .setSource("date", Instant.now().atZone(ZoneOffset.ofHours(1)).toInstant().toEpochMilli(), "num", 4) ); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00")) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00").to("2014-01-01T00:59:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("1")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00").to("2013-12-31T23:59:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("2")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00").to("2014-01-01T01:59:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); // We explicitly define a time zone in the from/to dates so whatever the time zone is, 
it won't be used - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T00:00:00Z").to("2014-01-01T00:59:00Z").timeZone("+10:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("1")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2013-12-31T23:00:00Z").to("2013-12-31T23:59:00Z").timeZone("+10:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("2")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T01:00:00Z").to("2014-01-01T01:59:00Z").timeZone("+10:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); // We define a time zone to be applied to the filter and from/to have no time zone - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T03:00:00").to("2014-01-01T03:59:00").timeZone("+03:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("1")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T02:00:00").to("2014-01-01T02:59:00").timeZone("+03:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("2")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01T04:00:00").to("2014-01-01T04:59:00").timeZone("+03:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01T04:00:00").to("2014-01-01T04:59:00").timeZone("+03:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00")) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date").from("2014-01-01").to("2014-01-01T00:59:00").timeZone("-01:00") + ).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), is("3")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("date").from("now/d-1d").timeZone("+01:00")) - .get(); + searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("date").from("now/d-1d").timeZone("+01:00")).get(); assertHitCount(searchResponse, 1L); 
assertThat(searchResponse.getHits().getAt(0).getId(), is("4")); } @@ -1707,17 +1600,15 @@ public void testRangeQueryWithLocaleMapping() throws Exception { ); assertHitCount( - client().prepareSearch("test") - .setQuery( - QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Do, 07 Dez 2000 00:00:00 -0800") - ), + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Do, 07 Dez 2000 00:00:00 -0800") + ), 1L ); assertHitCount( - client().prepareSearch("test") - .setQuery( - QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Fr, 08 Dez 2000 00:00:00 -0800") - ), + prepareSearch("test").setQuery( + QueryBuilders.rangeQuery("date_field").gte("Di, 05 Dez 2000 02:55:00 -0800").lte("Fr, 08 Dez 2000 00:00:00 -0800") + ), 2L ); } @@ -1726,7 +1617,7 @@ public void testSearchEmptyDoc() { client().prepareIndex("test").setId("1").setSource("{}", XContentType.JSON).get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(matchAllQuery()), 1L); + assertHitCount(prepareSearch().setQuery(matchAllQuery()), 1L); } public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedException { @@ -1737,25 +1628,16 @@ public void testMatchPhrasePrefixQuery() throws ExecutionException, InterruptedE client().prepareIndex("test1").setId("2").setSource("field", "trying out Elasticsearch") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchPhrasePrefixQuery("field", "Johnnie la").slop(between(2, 5))) - .get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - searchResponse = client().prepareSearch().setQuery(matchPhrasePrefixQuery("field", "trying")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "2"); - searchResponse = client().prepareSearch().setQuery(matchPhrasePrefixQuery("field", "try")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(matchPhrasePrefixQuery("field", "Johnnie la").slop(between(2, 5))), "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(matchPhrasePrefixQuery("field", "trying")), "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(matchPhrasePrefixQuery("field", "try")), "2"); } public void testQueryStringParserCache() throws Exception { createIndex("test"); indexRandom(true, false, client().prepareIndex("test").setId("1").setSource("nameTokens", "xyz")); - SearchResponse response = client().prepareSearch("test") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)) .get(); assertThat(response.getHits().getTotalHits().value, equalTo(1L)); @@ -1763,8 +1645,7 @@ public void testQueryStringParserCache() throws Exception { float first = response.getHits().getAt(0).getScore(); for (int i = 0; i < 100; i++) { - response = client().prepareSearch("test") - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + response = prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)) .get(); @@ -1785,7 +1666,7 @@ public void testRangeQueryRangeFields_24744() throws Exception { refresh(); RangeQueryBuilder range = new RangeQueryBuilder("int_range").relation("intersects").from(Integer.MIN_VALUE).to(Integer.MAX_VALUE); - SearchResponse 
searchResponse = client().prepareSearch("test").setQuery(range).get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(range).get(); assertHitCount(searchResponse, 1); } @@ -1826,7 +1707,7 @@ public void testNestedQueryWithFieldAlias() throws Exception { QueryBuilders.termQuery("section.route_length_miles", 42), ScoreMode.Max ); - assertHitCount(client().prepareSearch("index").setQuery(nestedQuery), 1); + assertHitCount(prepareSearch("index").setQuery(nestedQuery), 1); } public void testFieldAliasesForMetaFields() throws Exception { @@ -1851,8 +1732,7 @@ public void testFieldAliasesForMetaFields() throws Exception { indexRandom(true, false, indexRequest); updateClusterSettings(Settings.builder().put(IndicesService.INDICES_ID_FIELD_DATA_ENABLED_SETTING.getKey(), true)); try { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(termQuery("routing-alias", "custom")) + SearchResponse searchResponse = prepareSearch().setQuery(termQuery("routing-alias", "custom")) .addDocValueField("id-alias") .get(); assertHitCount(searchResponse, 1L); @@ -1887,10 +1767,10 @@ public void testWildcardQueryNormalizationOnKeywordField() { { WildcardQueryBuilder wildCardQuery = wildcardQuery("field1", "Bb*"); - assertHitCount(client().prepareSearch().setQuery(wildCardQuery), 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); wildCardQuery = wildcardQuery("field1", "bb*"); - assertHitCount(client().prepareSearch().setQuery(wildCardQuery), 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); } } @@ -1912,17 +1792,14 @@ public void testWildcardQueryNormalizationOnTextField() { { WildcardQueryBuilder wildCardQuery = wildcardQuery("field1", "Bb*"); - SearchResponse searchResponse = client().prepareSearch().setQuery(wildCardQuery).get(); - assertHitCount(searchResponse, 0L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 0L); // the following works not because of normalization but because of the `case_insensitive` parameter wildCardQuery = wildcardQuery("field1", "Bb*").caseInsensitive(true); - searchResponse = client().prepareSearch().setQuery(wildCardQuery).get(); - assertHitCount(searchResponse, 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); wildCardQuery = wildcardQuery("field1", "bb*"); - searchResponse = client().prepareSearch().setQuery(wildCardQuery).get(); - assertHitCount(searchResponse, 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); } } @@ -1944,12 +1821,10 @@ public void testWildcardQueryNormalizationKeywordSpecialCharacters() { refresh(); WildcardQueryBuilder wildCardQuery = wildcardQuery("field", "la*"); - SearchResponse searchResponse = client().prepareSearch().setQuery(wildCardQuery).get(); - assertHitCount(searchResponse, 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); wildCardQuery = wildcardQuery("field", "la*el-?"); - searchResponse = client().prepareSearch().setQuery(wildCardQuery).get(); - assertHitCount(searchResponse, 1L); + assertHitCount(prepareSearch().setQuery(wildCardQuery), 1L); } public static class MockAnalysisPlugin extends Plugin implements AnalysisPlugin { @@ -1999,7 +1874,7 @@ public void testIssueFuzzyInsideSpanMulti() { refresh(); BoolQueryBuilder query = boolQuery().filter(spanMultiTermQueryBuilder(fuzzyQuery("field", "foobarbiz").rewrite("constant_score"))); - assertHitCount(client().prepareSearch("test").setQuery(query), 1); + assertHitCount(prepareSearch("test").setQuery(query), 1); } public void testFetchIdFieldQuery() { @@ -2011,7 +1886,7 
@@ public void testFetchIdFieldQuery() { ensureGreen(); refresh(); - SearchResponse response = client().prepareSearch("test").addFetchField("_id").setSize(docCount).get(); + SearchResponse response = prepareSearch("test").addFetchField("_id").setSize(docCount).get(); SearchHit[] hits = response.getHits().getHits(); assertEquals(docCount, hits.length); for (SearchHit hit : hits) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 930bc565969cc..78d98b76b9bc8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -51,6 +51,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -84,41 +85,30 @@ public void testSimpleQueryString() throws ExecutionException, InterruptedExcept client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar")).get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar")), "1", "2", "3"); // Tests boost value setting. In this case doc 1 should always be ranked above the other // two matches. 
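
The newly imported assertSearchHitsWithoutFailures collapses the old four-step pattern (execute the request, assertNoFailures, assertHitCount, assertSearchHits) into a single call. Its real body lives in ElasticsearchAssertions; the following is only a sketch of the contract the converted call sites rely on, assuming the helper executes the request itself:

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;

// Assumed contract, not the verbatim implementation:
public static void assertSearchHitsWithoutFailures(SearchRequestBuilder request, String... ids) {
    SearchResponse response = request.get();
    ElasticsearchAssertions.assertNoFailures(response);           // no shard-level failures allowed
    ElasticsearchAssertions.assertHitCount(response, ids.length); // total hits must equal the expected ids
    ElasticsearchAssertions.assertSearchHits(response, ids);      // and the hit ids themselves must match
}
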
- searchResponse = client().prepareSearch() - .setQuery(boolQuery().should(simpleQueryStringQuery("\"foo bar\"").boost(10.0f)).should(termQuery("body", "eggplant"))) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery( + boolQuery().should(simpleQueryStringQuery("\"foo bar\"").boost(10.0f)).should(termQuery("body", "eggplant")) + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("3")); - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").defaultOperator(Operator.AND)).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("3")); - - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("\"quux baz\" +(eggplant | spaghetti)")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "4", "5"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar").defaultOperator(Operator.AND)), "3"); - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("eggplants").analyzer("mock_snowball")).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("4")); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("\"quux baz\" +(eggplant | spaghetti)")), "4", "5"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("eggplants").analyzer("mock_snowball")), "4"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("spaghetti").field("body", 1000.0f).field("otherbody", 2.0f).queryName("myquery")) - .get(); + searchResponse = prepareSearch().setQuery( + simpleQueryStringQuery("spaghetti").field("body", 1000.0f).field("otherbody", 2.0f).queryName("myquery") + ).get(); assertHitCount(searchResponse, 2L); assertFirstHit(searchResponse, hasId("5")); assertSearchHits(searchResponse, "5", "6"); assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("myquery")); - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("spaghetti").field("*body")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "5", "6"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("spaghetti").field("*body")), "5", "6"); } public void testSimpleQueryStringMinimumShouldMatch() throws Exception { @@ -134,31 +124,30 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { ); logger.info("--> query 1"); - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "3", "4"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")), "3", "4"); logger.info("--> query 2"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "3", "4"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")), + "3", + "4" + ); // test case from #13884 logger.info("--> query 3"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo").field("body").field("body2").field("body3").minimumShouldMatch("-50%")) - .get(); - 
assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "3", "4"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo").field("body").field("body2").field("body3").minimumShouldMatch("-50%")), + "1", + "3", + "4" + ); logger.info("--> query 4"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo bar baz").field("body").field("body2").minimumShouldMatch("70%")) - .get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "3", "4"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body").field("body2").minimumShouldMatch("70%")), + "3", + "4" + ); indexRandom( true, @@ -170,23 +159,31 @@ public void testSimpleQueryStringMinimumShouldMatch() throws Exception { ); logger.info("--> query 5"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")) - .get(); - assertHitCount(searchResponse, 4L); - assertSearchHits(searchResponse, "3", "4", "7", "8"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar").field("body").field("body2").minimumShouldMatch("2")), + "3", + "4", + "7", + "8" + ); logger.info("--> query 6"); - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")).get(); - assertHitCount(searchResponse, 5L); - assertSearchHits(searchResponse, "3", "4", "6", "7", "8"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar").minimumShouldMatch("2")), + "3", + "4", + "6", + "7", + "8" + ); logger.info("--> query 7"); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo bar baz").field("body2").field("other").minimumShouldMatch("70%")) - .get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "6", "7", "8"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body2").field("other").minimumShouldMatch("70%")), + "6", + "7", + "8" + ); } public void testNestedFieldSimpleQueryString() throws IOException { @@ -211,21 +208,10 @@ public void testNestedFieldSimpleQueryString() throws IOException { client().prepareIndex("test").setId("1").setSource("body", "foo bar baz").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); - - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")), "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")), "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar 
baz").field("body.sub")), "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")), "1"); } public void testSimpleQueryStringFlags() throws ExecutionException, InterruptedException { @@ -240,52 +226,50 @@ public void testSimpleQueryStringFlags() throws ExecutionException, InterruptedE client().prepareIndex("test").setId("6").setSource("otherbody", "spaghetti") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo bar").flags(SimpleQueryStringFlag.ALL)) - .get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo bar").flags(SimpleQueryStringFlag.ALL)), + "1", + "2", + "3" + ); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo | bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.OR)) - .get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo | bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.OR)), + "1", + "2", + "3" + ); - searchResponse = client().prepareSearch() - .setQuery(simpleQueryStringQuery("foo | bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE)) - .get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("3")); + assertSearchHitsWithoutFailures( + prepareSearch().setQuery(simpleQueryStringQuery("foo | bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE)), + "3" + ); assertHitCount( - client().prepareSearch() - .setQuery(simpleQueryStringQuery("baz | egg*").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE)), + prepareSearch().setQuery(simpleQueryStringQuery("baz | egg*").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE)), 0L ); assertHitCount( - client().prepareSearch() - .setSource( - new SearchSourceBuilder().query( - QueryBuilders.simpleQueryStringQuery("foo|bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE) - ) - ), + prepareSearch().setSource( + new SearchSourceBuilder().query( + QueryBuilders.simpleQueryStringQuery("foo|bar").defaultOperator(Operator.AND).flags(SimpleQueryStringFlag.NONE) + ) + ), 1L ); - searchResponse = client().prepareSearch() - .setQuery( + assertSearchHitsWithoutFailures( + prepareSearch().setQuery( simpleQueryStringQuery("quuz~1 + egg*").flags( SimpleQueryStringFlag.WHITESPACE, SimpleQueryStringFlag.AND, SimpleQueryStringFlag.FUZZY, SimpleQueryStringFlag.PREFIX ) - ) - .get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("4")); + ), + "4" + ); } public void testSimpleQueryStringLenient() throws ExecutionException, InterruptedException { @@ -297,18 +281,14 @@ public void testSimpleQueryStringLenient() throws ExecutionException, Interrupte ); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setAllowPartialSearchResults(true) + SearchResponse searchResponse = prepareSearch().setAllowPartialSearchResults(true) .setQuery(simpleQueryStringQuery("foo").field("field")) .get(); assertFailures(searchResponse); assertHitCount(searchResponse, 1L); assertSearchHits(searchResponse, "1"); - searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo").field("field").lenient(true)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); 
- assertSearchHits(searchResponse, "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("foo").field("field").lenient(true)), "1"); } // Issue #7967 @@ -320,12 +300,9 @@ public void testLenientFlagBeingTooLenient() throws Exception { ); BoolQueryBuilder q = boolQuery().should(simpleQueryStringQuery("bar").field("num").field("body").lenient(true)); - SearchResponse resp = client().prepareSearch("test").setQuery(q).get(); - assertNoFailures(resp); // the bug is that this would be parsed into basically a match_all // query and this would match both documents - assertHitCount(resp, 1); - assertSearchHits(resp, "1"); + assertSearchHitsWithoutFailures(prepareSearch("test").setQuery(q), "1"); } public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, InterruptedException, IOException { @@ -346,32 +323,21 @@ public void testSimpleQueryStringAnalyzeWildcard() throws ExecutionException, In indexRandom(true, client().prepareIndex("test1").setId("1").setSource("location", "Köln")); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("Köln*").field("location")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("Köln*").field("location")), "1"); } public void testSimpleQueryStringUsesFieldAnalyzer() throws Exception { client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("123").field("foo").field("bar")).get(); - assertHitCount(searchResponse, 1L); - assertSearchHits(searchResponse, "1"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("123").field("foo").field("bar")), "1"); } public void testSimpleQueryStringOnIndexMetaField() throws Exception { client().prepareIndex("test").setId("1").setSource("foo", 123, "bar", "abc").get(); client().prepareIndex("test").setId("2").setSource("foo", 234, "bar", "bcd").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("test").field("_index")).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("test").field("_index")), "1", "2"); } public void testEmptySimpleQueryStringWithAnalysis() throws Exception { @@ -393,9 +359,7 @@ public void testEmptySimpleQueryStringWithAnalysis() throws Exception { indexRandom(true, client().prepareIndex("test1").setId("1").setSource("body", "Some Text")); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("the*").field("body")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 0L); + assertSearchHitsWithoutFailures(prepareSearch().setQuery(simpleQueryStringQuery("the*").field("body"))); } public void testBasicAllQuery() throws Exception { @@ -409,15 +373,15 @@ public void testBasicAllQuery() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f3", "foo bar baz")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + SearchResponse resp = 
prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); assertHitCount(resp, 2L); assertHits(resp.getHits(), "1", "3"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); assertHitCount(resp, 2L); assertHits(resp.getHits(), "1", "3"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); assertHitCount(resp, 3L); assertHits(resp.getHits(), "1", "2", "3"); } @@ -432,19 +396,19 @@ public void testWithDate() throws Exception { reqs.add(client().prepareIndex("test").setId("2").setSource("f1", "bar", "f_date", "2015/09/01")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); + SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); assertHits(resp.getHits(), "1"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar \"2015/09/02\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\" \"2015/09/01\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); } @@ -463,19 +427,19 @@ public void testWithLotsOfTypes() throws Exception { ); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); + SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo bar")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("\"2015/09/02\"")).get(); assertHits(resp.getHits(), "1"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.2 \"2015/09/02\"")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1 1.8")).get(); assertHits(resp.getHits(), "1", "2"); assertHitCount(resp, 2L); } @@ -490,38 +454,38 @@ public void testDocWithAllTypes() throws Exception { reqs.add(client().prepareIndex("test").setId("1").setSource(docBody, XContentType.JSON)); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); + resp = 
prepareSearch("test").setQuery(simpleQueryStringQuery("Bar")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("Baz")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("19")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("19")).get(); assertHits(resp.getHits(), "1"); // nested doesn't match because it's hidden - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1476383971")).get(); assertHits(resp.getHits(), "1"); // bool doesn't match - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("7")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("7")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("23")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("23")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1293")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1293")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("42")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("42")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1.7")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("1.5")).get(); assertHits(resp.getHits(), "1"); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("127.0.0.1")).get(); assertHits(resp.getHits(), "1"); // binary doesn't match // suggest doesn't match // geo_point doesn't match // geo_shape doesn't match - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo Bar 19 127.0.0.1").defaultOperator(Operator.AND)).get(); assertHits(resp.getHits(), "1"); } @@ -536,11 +500,11 @@ public void testKeywordWithWhitespace() throws Exception { reqs.add(client().prepareIndex("test").setId("3").setSource("f1", "foo bar")); indexRandom(true, false, reqs); - SearchResponse resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); + SearchResponse resp = prepareSearch("test").setQuery(simpleQueryStringQuery("foo")).get(); assertHits(resp.getHits(), "3"); assertHitCount(resp, 1L); - resp = client().prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); + resp = prepareSearch("test").setQuery(simpleQueryStringQuery("bar")).get(); assertHits(resp.getHits(), "2", "3"); assertHitCount(resp, 2L); } @@ -556,7 +520,7 @@ public void testAllFieldsWithSpecifiedLeniency() throws Exception { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test").setQuery(simpleQueryStringQuery("foo123").lenient(false)).get() + () -> 
prepareSearch("test").setQuery(simpleQueryStringQuery("foo123").lenient(false)).get() ); assertThat(e.getDetailedMessage(), containsString("NumberFormatException: For input string: \"foo123\"")); } @@ -572,7 +536,7 @@ public void testFieldAlias() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")).get(); + SearchResponse response = prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_alias")).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -590,7 +554,7 @@ public void testFieldAliasWithWildcardField() throws Exception { indexRequests.add(client().prepareIndex("test").setId("3").setSource("f3", "another value", "f2", "three")); indexRandom(true, false, indexRequests); - SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")).get(); + SearchResponse response = prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("f3_*")).get(); assertNoFailures(response); assertHitCount(response, 2); @@ -608,7 +572,7 @@ public void testFieldAliasOnDisallowedFieldType() throws Exception { // The wildcard field matches aliases for both a text and boolean field. // By default, the boolean field should be ignored when building the query. - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get(); + SearchResponse response = prepareSearch("test").setQuery(queryStringQuery("text").field("f*_alias")).get(); assertNoFailures(response); assertHitCount(response, 1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index cf2a7f130bd98..1d13bea9e0639 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -67,10 +67,10 @@ public void testStopOneNodePreferenceWithRedState() throws IOException { "_prefer_nodes:somenode,server2" }; for (String pref : preferences) { logger.info("--> Testing out preference={}", pref); - SearchResponse searchResponse = client().prepareSearch().setSize(0).setPreference(pref).get(); + SearchResponse searchResponse = prepareSearch().setSize(0).setPreference(pref).get(); assertThat(RestStatus.OK, equalTo(searchResponse.status())); assertThat(pref, searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); - searchResponse = client().prepareSearch().setPreference(pref).get(); + searchResponse = prepareSearch().setPreference(pref).get(); assertThat(RestStatus.OK, equalTo(searchResponse.status())); assertThat(pref, searchResponse.getFailedShards(), greaterThanOrEqualTo(0)); } @@ -112,13 +112,13 @@ public void testSimplePreference() { client().prepareIndex("test").setSource("field1", "value1").get(); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("_local").get(); + searchResponse = 
prepareSearch().setQuery(matchAllQuery()).setPreference("_local").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setPreference("1234").get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setPreference("1234").get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } @@ -127,7 +127,7 @@ public void testThatSpecifyingNonExistingNodesReturnsUsefulError() { ensureGreen(); try { - client().prepareSearch().setQuery(matchAllQuery()).setPreference("_only_nodes:DOES-NOT-EXIST").get(); + prepareSearch().setQuery(matchAllQuery()).setPreference("_only_nodes:DOES-NOT-EXIST").get(); fail("Expected IllegalArgumentException"); } catch (IllegalArgumentException e) { assertThat(e, hasToString(containsString("no data nodes with criteria [DOES-NOT-EXIST] found for shard: [test]["))); @@ -217,8 +217,7 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { final String customPreference = randomAlphaOfLength(10); - final String nodeId = client().prepareSearch("test") - .setQuery(matchAllQuery()) + final String nodeId = prepareSearch("test").setQuery(matchAllQuery()) .setPreference(customPreference) .get() .getHits() @@ -260,7 +259,7 @@ public void testCustomPreferenceUnaffectedByOtherShardMovements() { } private static void assertSearchesSpecificNode(String index, String customPreference, String nodeId) { - final SearchResponse searchResponse = client().prepareSearch(index).setQuery(matchAllQuery()).setPreference(customPreference).get(); + final SearchResponse searchResponse = prepareSearch(index).setQuery(matchAllQuery()).setPreference(customPreference).get(); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getShard().getNodeId(), equalTo(nodeId)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 499aba8fd57d3..dc460468db605 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -114,10 +114,9 @@ public void testCustomScriptBinaryField() throws Exception { flush(); refresh(); - SearchResponse response = client().prepareSearch() - .setQuery( - scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length > 15", emptyMap())) - ) + SearchResponse response = prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length > 15", emptyMap())) + ) .addScriptField( "sbinaryData", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) @@ -169,8 +168,9 @@ public void testCustomScriptBoost() throws Exception { refresh(); logger.info("running doc['num1'].value > 1"); - SearchResponse response = client().prepareSearch() - .setQuery(scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap()))) + SearchResponse response = prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap())) + ) .addSort("num1", SortOrder.ASC) .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, 
"doc['num1'].value", Collections.emptyMap())) .get(); @@ -185,8 +185,9 @@ public void testCustomScriptBoost() throws Exception { params.put("param1", 2); logger.info("running doc['num1'].value > param1"); - response = client().prepareSearch() - .setQuery(scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params))) + response = prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) + ) .addSort("num1", SortOrder.ASC) .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .get(); @@ -198,8 +199,9 @@ public void testCustomScriptBoost() throws Exception { params = new HashMap<>(); params.put("param1", -1); logger.info("running doc['num1'].value > param1"); - response = client().prepareSearch() - .setQuery(scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params))) + response = prepareSearch().setQuery( + scriptQuery(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > param1", params)) + ) .addSort("num1", SortOrder.ASC) .addScriptField("sNum1", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap())) .get(); @@ -224,14 +226,14 @@ public void testDisallowExpensiveQueries() { // Execute with search.allow_expensive_queries = null => default value = false => success Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value > 1", Collections.emptyMap()); - assertNoFailures(client().prepareSearch("test-index").setQuery(scriptQuery(script))); + assertNoFailures(prepareSearch("test-index").setQuery(scriptQuery(script))); updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", false)); // Set search.allow_expensive_queries to "false" => assert failure ElasticsearchException e = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch("test-index").setQuery(scriptQuery(script)).get() + () -> prepareSearch("test-index").setQuery(scriptQuery(script)).get() ); assertEquals( "[script] queries cannot be executed when 'search.allow_expensive_queries' is set to false.", @@ -240,7 +242,7 @@ public void testDisallowExpensiveQueries() { // Set search.allow_expensive_queries to "true" => success updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); - assertNoFailures(client().prepareSearch("test-index").setQuery(scriptQuery(script))); + assertNoFailures(prepareSearch("test-index").setQuery(scriptQuery(script))); } finally { updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index 53bbe47149d32..c63aa19beb42e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -37,8 +37,7 @@ public class DuelScrollIT extends ESIntegTestCase { public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); - SearchResponse control = client().prepareSearch("index") - .setSearchType(context.searchType) + SearchResponse control = 
prepareSearch("index").setSearchType(context.searchType) .addSort(context.sort) .setSize(context.numDocs) .get(); @@ -47,8 +46,7 @@ public void testDuelQueryThenFetch() throws Exception { assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); - SearchResponse searchScrollResponse = client().prepareSearch("index") - .setSearchType(context.searchType) + SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) .addSort(context.sort) .setSize(context.scrollRequestSize) .setScroll("10m") @@ -215,8 +213,7 @@ private int createIndex(boolean singleShard) throws Exception { private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int numDocs) throws Exception { final int size = scaledRandomIntBetween(5, numDocs + 5); - final SearchResponse control = client().prepareSearch("test") - .setSearchType(searchType) + final SearchResponse control = prepareSearch("test").setSearchType(searchType) .setSize(numDocs) .setQuery(QueryBuilders.matchQuery("foo", "true")) .addSort(SortBuilders.fieldSort("_doc")) @@ -224,8 +221,7 @@ private void testDuelIndexOrder(SearchType searchType, boolean trackScores, int .get(); assertNoFailures(control); - SearchResponse scroll = client().prepareSearch("test") - .setSearchType(searchType) + SearchResponse scroll = prepareSearch("test").setSearchType(searchType) .setSize(size) .setQuery(QueryBuilders.matchQuery("foo", "true")) .addSort(SortBuilders.fieldSort("_doc")) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 3b459f8d57934..f94e59cbe1ab4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -50,7 +50,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -82,8 +81,7 @@ public void testSimpleScrollQueryThenFetch() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .addSort("field", SortOrder.ASC) @@ -135,8 +133,7 @@ public void testSimpleScrollQueryThenFetchSmallSizeUnevenDistribution() throws E indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse = client().prepareSearch() - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.QUERY_THEN_FETCH) .setQuery(matchAllQuery()) .setSize(3) .setScroll(TimeValue.timeValueMinutes(2)) @@ -203,26 +200,13 @@ public void testScrollAndUpdateIndex() throws Exception { indicesAdmin().prepareRefresh().get(); - assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - 
assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, - equalTo(500L) - ); - assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, - equalTo(500L) - ); - assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(500L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(0L)); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(queryStringQuery("user:kimchy")) + SearchResponse searchResponse = prepareSearch().setQuery(queryStringQuery("user:kimchy")) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .addSort("postDate", SortOrder.ASC) @@ -238,21 +222,15 @@ public void testScrollAndUpdateIndex() throws Exception { } while (searchResponse.getHits().getHits().length > 0); indicesAdmin().prepareRefresh().get(); - assertThat(client().prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); - assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, - equalTo(0L) - ); - assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, - equalTo(0L) - ); + assertThat(prepareSearch().setSize(0).setQuery(matchAllQuery()).get().getHits().getTotalHits().value, equalTo(500L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); + assertThat(prepareSearch().setSize(0).setQuery(termQuery("message", "test")).get().getHits().getTotalHits().value, equalTo(0L)); assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, + prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(500L) ); assertThat( - client().prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, + prepareSearch().setSize(0).setQuery(termQuery("message", "update")).get().getHits().getTotalHits().value, equalTo(500L) ); } finally { @@ -275,16 +253,14 @@ public void testSimpleScrollQueryThenFetch_clearScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse1 = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - SearchResponse searchResponse2 = client().prepareSearch() - 
.setQuery(matchAllQuery()) + SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -395,16 +371,14 @@ public void testSimpleScrollQueryThenFetchClearAllScrollIds() throws Exception { indicesAdmin().prepareRefresh().get(); - SearchResponse searchResponse1 = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse1 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) .addSort("field", SortOrder.ASC) .get(); - SearchResponse searchResponse2 = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse2 = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .setSearchType(SearchType.QUERY_THEN_FETCH) @@ -470,8 +444,7 @@ public void testDeepScrollingDoesNotBlowUp() throws Exception { updateIndexSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE), "index"); for (SearchType searchType : SearchType.values()) { - SearchRequestBuilder builder = client().prepareSearch("index") - .setSearchType(searchType) + SearchRequestBuilder builder = prepareSearch("index").setSearchType(searchType) .setQuery(QueryBuilders.matchAllQuery()) .setSize(Integer.MAX_VALUE) .setScroll("1m"); @@ -492,7 +465,7 @@ public void testThatNonExistingScrollIdReturnsCorrectException() throws Exceptio client().prepareIndex("index").setId("1").setSource("field", "value").execute().get(); refresh(); - SearchResponse searchResponse = client().prepareSearch("index").setSize(1).setScroll("1m").get(); + SearchResponse searchResponse = prepareSearch("index").setSize(1).setScroll("1m").get(); assertThat(searchResponse.getScrollId(), is(notNullValue())); ClearScrollResponse clearScrollResponse = client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get(); @@ -508,7 +481,7 @@ public void testStringSortMissingAscTerminates() throws Exception { client().prepareIndex("test").setId("1").setSource("some_field", "test").get(); refresh(); - SearchResponse response = client().prepareSearch("test") + SearchResponse response = prepareSearch("test") .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last")) .setScroll("1m") @@ -517,11 +490,11 @@ public void testStringSortMissingAscTerminates() throws Exception { assertSearchHits(response, "1"); response = client().prepareSearchScroll(response.getScrollId()).get(); - assertSearchResponse(response); + assertNoFailures(response); assertHitCount(response, 1); assertNoSearchHits(response); - response = client().prepareSearch("test") + response = prepareSearch("test") .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first")) .setScroll("1m") @@ -540,8 +513,7 @@ public void testCloseAndReopenOrDeleteWithActiveScroll() { client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", i).get(); } refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(35) .setScroll(TimeValue.timeValueMinutes(2)) .addSort("field", SortOrder.ASC) @@ -604,7 +576,7 @@ public void testInvalidScrollKeepAlive() throws IOException { Exception exc = expectThrows( Exception.class, - () -> 
client().prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueHours(2)).get() + () -> prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueHours(2)).get() ); IllegalArgumentException illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap( exc, @@ -613,11 +585,7 @@ public void testInvalidScrollKeepAlive() throws IOException { assertNotNull(illegalArgumentException); assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (2h) is too large")); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(1) - .setScroll(TimeValue.timeValueMinutes(5)) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)).get(); assertNotNull(searchResponse.getScrollId()); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); @@ -651,8 +619,7 @@ public void testScrollRewrittenToMatchNoDocs() { SearchResponse resp = null; try { int totalHits = 0; - resp = client().prepareSearch("test") - .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + resp = prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setMaxConcurrentShardRequests(randomIntBetween(1, 3)) // sometimes fan out shard requests one by one .setSize(randomIntBetween(1, 2)) .setScroll(TimeValue.timeValueMinutes(1)) @@ -681,16 +648,14 @@ public void testRestartDataNodesDuringScrollSearch() throws Exception { index("prod", "prod-" + i, Map.of()); } indicesAdmin().prepareRefresh().get(); - SearchResponse respFromDemoIndex = client().prepareSearch("demo") - .setSize(randomIntBetween(1, 10)) + SearchResponse respFromDemoIndex = prepareSearch("demo").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); internalCluster().restartNode(dataNode, new InternalTestCluster.RestartCallback()); ensureGreen("demo", "prod"); - SearchResponse respFromProdIndex = client().prepareSearch("prod") - .setSize(randomIntBetween(1, 10)) + SearchResponse respFromProdIndex = prepareSearch("prod").setSize(randomIntBetween(1, 10)) .setQuery(new MatchAllQueryBuilder()) .setScroll(TimeValue.timeValueMinutes(5)) .get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java index 64597a3520118..96c007e05e414 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -58,8 +58,7 @@ public void testScanScrollWithShardExceptions() throws Exception { indexRandom(false, writes); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(10) .setScroll(TimeValue.timeValueMinutes(1)) .get(); @@ -75,7 +74,7 @@ public void testScanScrollWithShardExceptions() throws Exception { internalCluster().stopRandomNonMasterNode(); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); + searchResponse = 
prepareSearch().setQuery(matchAllQuery()).setSize(10).setScroll(TimeValue.timeValueMinutes(1)).get(); assertThat(searchResponse.getSuccessfulShards(), lessThan(searchResponse.getTotalShards())); numHits = 0; int numberOfSuccessfulShards = searchResponse.getSuccessfulShards(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 87a500691aab5..3ac8b103ce910 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -67,8 +67,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .addSort("field1", SortOrder.ASC) + () -> prepareSearch("test").addSort("field1", SortOrder.ASC) .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0 }) .setScroll("1m") @@ -83,8 +82,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .addSort("field1", SortOrder.ASC) + () -> prepareSearch("test").addSort("field1", SortOrder.ASC) .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0 }) .setFrom(10) @@ -99,7 +97,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test").setQuery(matchAllQuery()).searchAfter(new Object[] { 0.75f }).get() + () -> prepareSearch("test").setQuery(matchAllQuery()).searchAfter(new Object[] { 0.75f }).get() ); assertTrue(e.shardFailures().length > 0); for (ShardSearchFailure failure : e.shardFailures()) { @@ -110,8 +108,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .addSort("field2", SortOrder.DESC) + () -> prepareSearch("test").addSort("field2", SortOrder.DESC) .addSort("field1", SortOrder.ASC) .setQuery(matchAllQuery()) .searchAfter(new Object[] { 1 }) @@ -126,8 +123,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .setQuery(matchAllQuery()) + () -> prepareSearch("test").setQuery(matchAllQuery()) .addSort("field1", SortOrder.ASC) .searchAfter(new Object[] { 1, 2 }) .get() @@ -141,8 +137,7 @@ public void testsShouldFail() throws Exception { { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .setQuery(matchAllQuery()) + () -> prepareSearch("test").setQuery(matchAllQuery()) .addSort("field1", SortOrder.ASC) .searchAfter(new Object[] { "toto" }) .get() @@ -162,8 +157,7 @@ public void testWithNullStrings() throws InterruptedException { client().prepareIndex("test").setId("0").setSource("field1", 0), client().prepareIndex("test").setId("1").setSource("field1", 100, "field2", "toto") ); - SearchResponse searchResponse = client().prepareSearch("test") - .addSort("field1", SortOrder.ASC) + SearchResponse searchResponse = prepareSearch("test").addSort("field1", SortOrder.ASC) .addSort("field2", SortOrder.ASC) .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }) @@ -235,8 
+229,7 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { .add(new IndexRequest("test").id("5").source("start_date", "2017-01-20", "end_date", "2025-05-28")) .get(); - SearchResponse resp = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + SearchResponse resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) .setSize(2) .get(); @@ -244,8 +237,7 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("22/01/2015", "2022-07-23")); assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("21/02/2016", "2024-03-24")); - resp = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("yyyy-MM-dd")) .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) .setSize(2) .get(); @@ -254,8 +246,7 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", "2025-05-28")); assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", "2021-02-22")); - resp = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + resp = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date")) // it's okay because end_date has the format "yyyy-MM-dd" .searchAfter(new String[] { "21/02/2016", "2024-03-24" }) .setSize(2) .get(); @@ -264,15 +255,13 @@ public void testWithCustomFormatSortValueOfDateField() throws Exception { assertThat(resp.getHits().getHits()[0].getSortValues(), arrayContaining("20/01/2017", 1748390400000L)); assertThat(resp.getHits().getHits()[1].getSortValues(), arrayContaining("23/04/2018", 1613952000000L)); - SearchRequestBuilder searchRequest = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + SearchRequestBuilder searchRequest = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("epoch_millis")) .searchAfter(new Object[] { "21/02/2016", 1748390400000L }) .setSize(2); assertNoFailures(searchRequest); - searchRequest = client().prepareSearch("test") - .addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) + searchRequest = prepareSearch("test").addSort(SortBuilders.fieldSort("start_date").setFormat("dd/MM/yyyy")) .addSort(SortBuilders.fieldSort("end_date").setFormat("epoch_millis")) // wrong format .searchAfter(new Object[] { "21/02/2016", "23/04/2018" }) .setSize(2); @@ -334,7 +323,7 @@ private void assertSearchFromWithSortValues(String indexName, List<List<Object>> int offset = 0; Object[] sortValues = null; while (offset < documents.size()) { - SearchRequestBuilder req = client().prepareSearch(indexName); + SearchRequestBuilder req = prepareSearch(indexName); for (int i = 0; i < documents.get(0).size(); i++) { req.addSort("field" + Integer.toString(i), SortOrder.ASC); } @@ -441,8 +430,7 @@ public void testScrollAndSearchAfterWithBigIndex() { Collections.sort(timestamps); // scroll with big index { - SearchResponse
resp = client().prepareSearch("test") - .setSize(randomIntBetween(50, 100)) + SearchResponse resp = prepareSearch("test").setSize(randomIntBetween(50, 100)) .setQuery(new MatchAllQueryBuilder()) .addSort(new FieldSortBuilder("timestamp")) .setScroll(TimeValue.timeValueMinutes(5)) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index d71345a42dd2c..61490cac43e45 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -46,6 +46,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -62,14 +63,11 @@ protected Collection> nodePlugins() { } public void testSearchNullIndex() { - expectThrows( - NullPointerException.class, - () -> client().prepareSearch((String) null).setQuery(QueryBuilders.termQuery("_id", "XXX1")).get() - ); + expectThrows(NullPointerException.class, () -> prepareSearch((String) null).setQuery(QueryBuilders.termQuery("_id", "XXX1")).get()); expectThrows( NullPointerException.class, - () -> client().prepareSearch((String[]) null).setQuery(QueryBuilders.termQuery("_id", "XXX1")).get() + () -> prepareSearch((String[]) null).setQuery(QueryBuilders.termQuery("_id", "XXX1")).get() ); } @@ -94,7 +92,7 @@ public void testSearchRandomPreference() throws InterruptedException, ExecutionE randomPreference = randomUnicodeOfLengthBetween(0, 4); } // id is not indexed, but lets see that we automatically convert to - assertHitCount(client().prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomPreference), 6L); + assertHitCount(prepareSearch().setQuery(QueryBuilders.matchAllQuery()).setPreference(randomPreference), 6L); } } @@ -121,8 +119,7 @@ public void testSimpleIp() throws Exception { client().prepareIndex("test").setId("1").setSource("from", "192.168.0.5", "to", "192.168.0.10").setRefreshPolicy(IMMEDIATE).get(); assertHitCount( - client().prepareSearch() - .setQuery(boolQuery().must(rangeQuery("from").lte("192.168.0.7")).must(rangeQuery("to").gte("192.168.0.7"))), + prepareSearch().setQuery(boolQuery().must(rangeQuery("from").lte("192.168.0.7")).must(rangeQuery("to").gte("192.168.0.7"))), 1L ); } @@ -153,22 +150,19 @@ public void testIpCidr() throws Exception { client().prepareIndex("test").setId("5").setSource("ip", "2001:db8::ff00:42:8329").get(); refresh(); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))), 1L); - assertHitCount(client().prepareSearch().setQuery(queryStringQuery("ip: 192.168.0.1")), 1L); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32"))), 1L); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.0/24"))), 3L); - 
assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.0.0.0/8"))), 4L); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0.0.0.0/0"))), 4L); - assertHitCount( - client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::ff00:42:8329/128"))), - 1L - ); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::/64"))), 1L); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "::/0"))), 5L); - assertHitCount(client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32"))), 0L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1"))), 1L); + assertHitCount(prepareSearch().setQuery(queryStringQuery("ip: 192.168.0.1")), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.1/32"))), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.0.0/24"))), 3L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.0.0.0/8"))), 4L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0.0.0.0/0"))), 4L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::ff00:42:8329/128"))), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "2001:db8::/64"))), 1L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "::/0"))), 5L); + assertHitCount(prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "192.168.1.5/32"))), 0L); assertFailures( - client().prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))), + prepareSearch().setQuery(boolQuery().must(QueryBuilders.termQuery("ip", "0/0/0/0/0"))), RestStatus.BAD_REQUEST, containsString("Expected [ip/prefix] but was [0/0/0/0/0]") ); @@ -179,8 +173,8 @@ public void testSimpleId() { client().prepareIndex("test").setId("XXX1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get(); // id is not indexed, but lets see that we automatically convert to - assertHitCount(client().prepareSearch().setQuery(QueryBuilders.termQuery("_id", "XXX1")), 1L); - assertHitCount(client().prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX1")), 1L); + assertHitCount(prepareSearch().setQuery(QueryBuilders.termQuery("_id", "XXX1")), 1L); + assertHitCount(prepareSearch().setQuery(QueryBuilders.queryStringQuery("_id:XXX1")), 1L); } public void testSimpleDateRange() throws Exception { @@ -190,42 +184,33 @@ public void testSimpleDateRange() throws Exception { client().prepareIndex("test").setId("3").setSource("field", "1967-01-01T00:00").get(); ensureGreen(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")) - .get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); - - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lte("2010-01-06T02:00")) - .get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); + assertHitCountAndNoFailures( + 
prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-03||+2d").lte("2010-01-04||+2d/d")), + 2L + ); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lt("2010-01-06T02:00")) - .get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lte("2010-01-06T02:00")), + 2L + ); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gt("2010-01-05T02:00").lt("2010-01-06T02:00")) - .get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 0L); + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("2010-01-05T02:00").lt("2010-01-06T02:00")), + 1L + ); - assertHitCount( - client().prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("field:[2010-01-03||+2d TO 2010-01-04||+2d/d]")), - 2L + assertHitCountAndNoFailures( + prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("2010-01-05T02:00").lt("2010-01-06T02:00")), + 0L ); + assertHitCount(prepareSearch("test").setQuery(QueryBuilders.queryStringQuery("field:[2010-01-03||+2d TO 2010-01-04||+2d/d]")), 2L); + // a string value of "1000" should be parsed as the year 1000 and return all three docs - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("1000")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 3L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("1000")), 3L); // a numeric value of 1000 should be parsed as 1000 millis since epoch and return only docs after 1970 - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(1000)).get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(1000)).get(); assertNoFailures(searchResponse); assertHitCount(searchResponse, 2L); String[] expectedIds = new String[] { "1", "2" }; @@ -245,37 +230,14 @@ public void testRangeQueryKeyword() throws Exception { ensureGreen(); refresh(); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("A").lte("B")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("A").lte("B")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("A").lt("B")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 1L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(null).lt("C")).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 3L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("B").lt(null)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 2L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(null).lt(null)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 4L); - - searchResponse = 
client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("").lt(null)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 4L); - - searchResponse = client().prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("").lt(null)).get(); - assertNoFailures(searchResponse); - assertHitCount(searchResponse, 3L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("A").lte("B")), 2L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("A").lte("B")), 1L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("A").lt("B")), 1L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(null).lt("C")), 3L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("B").lt(null)), 2L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt(null).lt(null)), 4L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte("").lt(null)), 4L); + assertHitCountAndNoFailures(prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gt("").lt(null)), 3L); } public void testSimpleTerminateAfterCount() throws Exception { @@ -295,18 +257,12 @@ public void testSimpleTerminateAfterCount() throws Exception { SearchResponse searchResponse; for (int i = 1; i < max; i++) { - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)) - .setTerminateAfter(i) - .get(); + searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(i).get(); assertHitCount(searchResponse, i); assertTrue(searchResponse.isTerminatedEarly()); } - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)) - .setTerminateAfter(2 * max) - .get(); + searchResponse = prepareSearch("test").setQuery(QueryBuilders.rangeQuery("field").gte(1).lte(max)).setTerminateAfter(2 * max).get(); assertHitCount(searchResponse, max); assertFalse(searchResponse.isTerminatedEarly()); @@ -329,8 +285,7 @@ public void testSimpleIndexSortEarlyTerminate() throws Exception { SearchResponse searchResponse; for (int i = 1; i < max; i++) { - searchResponse = client().prepareSearch("test") - .addDocValueField("rank") + searchResponse = prepareSearch("test").addDocValueField("rank") .setTrackTotalHits(false) .addSort("rank", SortOrder.ASC) .setSize(i) @@ -346,19 +301,18 @@ public void testInsaneFromAndSize() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertWindowFails(client().prepareSearch("idx").setFrom(Integer.MAX_VALUE)); - assertWindowFails(client().prepareSearch("idx").setSize(Integer.MAX_VALUE)); + assertWindowFails(prepareSearch("idx").setFrom(Integer.MAX_VALUE)); + assertWindowFails(prepareSearch("idx").setSize(Integer.MAX_VALUE)); } public void testTooLargeFromAndSize() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertWindowFails(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); - assertWindowFails(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); + 
assertWindowFails(prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY))); + assertWindowFails(prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1)); assertWindowFails( - client().prepareSearch("idx") - .setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) ); } @@ -367,11 +321,10 @@ public void testLargeFromAndSizeSucceeds() throws Exception { createIndex("idx"); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10), 1); - assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); + assertHitCount(prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) - 10), 1); + assertHitCount(prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); assertHitCount( - client().prepareSearch("idx") - .setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2) + prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2) .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) / 2 - 1), 1 ); @@ -384,11 +337,10 @@ public void testTooLargeFromAndSizeOkBySetting() throws Exception { ).get(); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); - assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1), 1); + assertHitCount(prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); + assertHitCount(prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1), 1); assertHitCount( - client().prepareSearch("idx") - .setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1 ); @@ -403,11 +355,10 @@ public void testTooLargeFromAndSizeOkByDynamicSetting() throws Exception { ); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); - assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1), 1); + assertHitCount(prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1); + assertHitCount(prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) + 1), 1); assertHitCount( - client().prepareSearch("idx") - .setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) + prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)) .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY)), 1 ); @@ -417,11 +368,10 @@ public void testTooLargeFromAndSizeBackwardsCompatibilityRecommendation() throws 
prepareCreate("idx").setSettings(Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(), Integer.MAX_VALUE)).get(); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount(client().prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10), 1); - assertHitCount(client().prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10), 1); + assertHitCount(prepareSearch("idx").setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10), 1); + assertHitCount(prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10), 1); assertHitCount( - client().prepareSearch("idx") - .setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10) + prepareSearch("idx").setSize(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10) .setFrom(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY) * 10), 1 ); @@ -441,10 +391,7 @@ public void testTooLargeRescoreOkBySetting() throws Exception { .get(); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount( - client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), - 1 - ); + assertHitCount(prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), 1); } public void testTooLargeRescoreOkByResultWindowSetting() throws Exception { @@ -458,10 +405,7 @@ public void testTooLargeRescoreOkByResultWindowSetting() throws Exception { ).get(); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount( - client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), - 1 - ); + assertHitCount(prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), 1); } public void testTooLargeRescoreOkByDynamicSetting() throws Exception { @@ -470,10 +414,7 @@ public void testTooLargeRescoreOkByDynamicSetting() throws Exception { updateIndexSettings(Settings.builder().put(IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey(), defaultMaxWindow * 2), "idx"); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount( - client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), - 1 - ); + assertHitCount(prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), 1); } public void testTooLargeRescoreOkByDynamicResultWindowSetting() throws Exception { @@ -486,10 +427,7 @@ public void testTooLargeRescoreOkByDynamicResultWindowSetting() throws Exception ); indexRandom(true, client().prepareIndex("idx").setSource("{}", XContentType.JSON)); - assertHitCount( - client().prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), - 1 - ); + assertHitCount(prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(defaultMaxWindow + 1)), 1); } public void testQueryNumericFieldWithRegex() throws Exception { @@ -497,7 +435,7 @@ public void testQueryNumericFieldWithRegex() throws Exception { ensureGreen("idx"); try { - client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", "34")).get(); + prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", 
"34")).get(); fail("SearchPhaseExecutionException should have been thrown"); } catch (SearchPhaseExecutionException ex) { assertThat(ex.getRootCause().getMessage(), containsString("Can only use regexp queries on keyword and text fields")); @@ -518,8 +456,7 @@ public void testTermQueryBigInt() throws Exception { XContentParser parser = createParser(JsonXContent.jsonXContent, queryJson); parser.nextToken(); TermQueryBuilder query = TermQueryBuilder.fromXContent(parser); - SearchResponse searchResponse = client().prepareSearch("idx").setQuery(query).get(); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertHitCount(prepareSearch("idx").setQuery(query), 1); } public void testTooLongRegexInRegexpQuery() throws Exception { @@ -533,7 +470,7 @@ public void testTooLongRegexInRegexpQuery() throws Exception { } SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", regexp.toString())).get() + () -> prepareSearch("idx").setQuery(QueryBuilders.regexpQuery("num", regexp.toString())).get() ); assertThat( e.getRootCause().getMessage(), @@ -564,8 +501,7 @@ private void assertWindowFails(SearchRequestBuilder search) { } private void assertRescoreWindowFails(int windowSize) { - SearchRequestBuilder search = client().prepareSearch("idx") - .addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(windowSize)); + SearchRequestBuilder search = prepareSearch("idx").addRescorer(new QueryRescorerBuilder(matchAllQuery()).windowSize(windowSize)); SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> search.get()); assertThat( e.toString(), diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index e32bc4e9b35ad..948b7261ded1c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -92,16 +92,14 @@ public void testSearchSort() throws Exception { for (String field : new String[] { "_id", "random_int", "static_int" }) { int fetchSize = randomIntBetween(10, 100); // test _doc sort - SearchRequestBuilder request = client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); assertSearchSlicesWithScroll(request, field, max, numDocs); // test numeric sort - request = client().prepareSearch("test") - .setQuery(matchAllQuery()) + request = prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .addSort(SortBuilders.fieldSort("random_int")) .setSize(fetchSize); @@ -114,12 +112,11 @@ public void testWithPreferenceAndRoutings() throws Exception { int totalDocs = randomIntBetween(100, 1000); setupIndex(totalDocs, numShards); { - SearchResponse sr = client().prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0).get(); + SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0).get(); int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); - SearchRequestBuilder request = 
client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .setSize(fetchSize) .setPreference("_shards:1,4") @@ -127,12 +124,11 @@ public void testWithPreferenceAndRoutings() throws Exception { assertSearchSlicesWithScroll(request, "_id", max, numDocs); } { - SearchResponse sr = client().prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0).get(); + SearchResponse sr = prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0).get(); int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); - SearchRequestBuilder request = client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .setSize(fetchSize) .setRouting("foo", "bar") @@ -145,14 +141,12 @@ public void testWithPreferenceAndRoutings() throws Exception { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias1").routing("foo")) .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias2").routing("bar")) .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) - .get() ); - SearchResponse sr = client().prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0).get(); + SearchResponse sr = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0).get(); int numDocs = (int) sr.getHits().getTotalHits().value; int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); - SearchRequestBuilder request = client().prepareSearch("alias1", "alias3") - .setQuery(matchAllQuery()) + SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .setSize(fetchSize) .addSort(SortBuilders.fieldSort("_doc")); @@ -222,8 +216,7 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie for (int id = 0; id < numSlice; id++) { int numSliceResults = 0; - SearchRequestBuilder request = client().prepareSearch("test") - .slice(new SliceBuilder(sliceField, id, numSlice)) + SearchRequestBuilder request = prepareSearch("test").slice(new SliceBuilder(sliceField, id, numSlice)) .setPointInTime(new PointInTimeBuilder(pointInTimeId)) .addSort(SortBuilders.fieldSort(sortField)) .setSize(randomIntBetween(10, 100)); @@ -257,8 +250,7 @@ public void testInvalidFields() throws Exception { setupIndex(0, 1); SearchPhaseExecutionException exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .setQuery(matchAllQuery()) + () -> prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .slice(new SliceBuilder("invalid_random_int", 0, 10)) .get() @@ -269,8 +261,7 @@ public void testInvalidFields() throws Exception { exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test") - .setQuery(matchAllQuery()) + () -> prepareSearch("test").setQuery(matchAllQuery()) .setScroll(new Scroll(TimeValue.timeValueSeconds(10))) .slice(new SliceBuilder("invalid_random_kw", 0, 10)) .get() @@ -284,7 +275,7 @@ public void testInvalidQuery() throws Exception { setupIndex(0, 1); SearchPhaseExecutionException exc = 
expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch().setQuery(matchAllQuery()).slice(new SliceBuilder("invalid_random_int", 0, 10)).get() + () -> prepareSearch().setQuery(matchAllQuery()).slice(new SliceBuilder("invalid_random_int", 0, 10)).get() ); Throwable rootCause = findRootCause(exc); assertThat(rootCause.getClass(), equalTo(SearchException.class)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 3b6f29fd1ec90..2926d36becb4a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -63,7 +63,6 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -121,12 +120,11 @@ public void testIssue8226() { } refresh(); // sort DESC - SearchResponse searchResponse = client().prepareSearch() - .addSort(new FieldSortBuilder("entry").order(SortOrder.DESC).unmappedType(useMapping ? null : "long")) - .setSize(10) - .get(); + SearchResponse searchResponse = prepareSearch().addSort( + new FieldSortBuilder("entry").order(SortOrder.DESC).unmappedType(useMapping ? null : "long") + ).setSize(10).get(); logClusterState(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); for (int j = 1; j < searchResponse.getHits().getHits().length; j++) { Number current = (Number) searchResponse.getHits().getHits()[j].getSourceAsMap().get("entry"); @@ -135,12 +133,11 @@ public void testIssue8226() { } // sort ASC - searchResponse = client().prepareSearch() - .addSort(new FieldSortBuilder("entry").order(SortOrder.ASC).unmappedType(useMapping ? null : "long")) - .setSize(10) - .get(); + searchResponse = prepareSearch().addSort( + new FieldSortBuilder("entry").order(SortOrder.ASC).unmappedType(useMapping ? 
null : "long") + ).setSize(10).get(); logClusterState(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); for (int j = 1; j < searchResponse.getHits().getHits().length; j++) { Number current = (Number) searchResponse.getHits().getHits()[j].getSourceAsMap().get("entry"); @@ -175,29 +172,24 @@ public void testIssue6614() throws ExecutionException, InterruptedException { docs += builders.size(); builders.clear(); } - SearchResponse allDocsResponse = client().prepareSearch() - .setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("foo", "bar")) - .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/0" + randomIntBetween(1, 7) + "/01")) - ) - .addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")) - .setSize(docs) - .get(); - assertSearchResponse(allDocsResponse); + SearchResponse allDocsResponse = prepareSearch().setQuery( + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("foo", "bar")) + .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/0" + randomIntBetween(1, 7) + "/01")) + ).addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")).setSize(docs).get(); + assertNoFailures(allDocsResponse); final int numiters = randomIntBetween(1, 20); for (int i = 0; i < numiters; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("foo", "bar")) - .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/" + Strings.format("%02d", randomIntBetween(1, 7)) + "/01")) - ) + SearchResponse searchResponse = prepareSearch().setQuery( + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("foo", "bar")) + .must(QueryBuilders.rangeQuery("timeUpdated").gte("2014/" + Strings.format("%02d", randomIntBetween(1, 7)) + "/01")) + ) .addSort(new FieldSortBuilder("timeUpdated").order(SortOrder.ASC).unmappedType("date")) .setSize(scaledRandomIntBetween(1, docs)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); for (int j = 0; j < searchResponse.getHits().getHits().length; j++) { assertThat( searchResponse.toString() + "\n vs. 
\n" + allDocsResponse.toString(), @@ -222,7 +214,7 @@ public void testTrackScores() throws Exception { ); refresh(); - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getMaxScore(), equalTo(Float.NaN)); for (SearchHit hit : searchResponse.getHits()) { @@ -230,7 +222,7 @@ public void testTrackScores() throws Exception { } // now check with score tracking - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).setTrackScores(true).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addSort("svalue", SortOrder.ASC).setTrackScores(true).get(); assertThat(searchResponse.getHits().getMaxScore(), not(equalTo(Float.NaN))); for (SearchHit hit : searchResponse.getHits()) { @@ -281,8 +273,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut indexRandom(true, builders); { int size = between(1, denseBytes.size()); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .setSize(size) .addSort("dense_bytes", SortOrder.ASC) .get(); @@ -300,8 +291,7 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut } if (sparseBytes.isEmpty() == false) { int size = between(1, sparseBytes.size()); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setPostFilter(QueryBuilders.existsQuery("sparse_bytes")) .setSize(size) .addSort("sparse_bytes", SortOrder.ASC) @@ -328,8 +318,7 @@ public void test3078() { client().prepareIndex("test").setId(Integer.toString(i)).setSource("field", Integer.toString(i)).get(); } refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) .get(); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); @@ -340,8 +329,7 @@ public void test3078() { client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); refresh(); - searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) .get(); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); @@ -351,8 +339,7 @@ public void test3078() { // reindex - no refresh client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); - searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) .get(); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); @@ -364,8 +351,7 @@ public void test3078() { refresh(); client().prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); - searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + searchResponse = 
prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) .get(); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); @@ -373,8 +359,7 @@ public void test3078() { assertThat(searchResponse.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); refresh(); - searchResponse = client().prepareSearch("test") - .setQuery(matchAllQuery()) + searchResponse = prepareSearch("test").setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)) .get(); assertThat(searchResponse.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); @@ -392,29 +377,27 @@ public void testScoreSortDirection() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field"))) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ).get(); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field"))) - .addSort("_score", SortOrder.DESC) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ).addSort("_score", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore())); assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore())); assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); - searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field"))) - .addSort("_score", SortOrder.DESC) - .get(); + searchResponse = prepareSearch("test").setQuery( + QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) + ).addSort("_score", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2")); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); @@ -430,17 +413,16 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field"))) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery( + functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")) + ).get(); 
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1"));
         assertThat(searchResponse.getHits().getAt(1).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(0).getScore()));
         assertThat(searchResponse.getHits().getAt(1).getId(), equalTo("2"));
         assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore()));
         assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")))
+        searchResponse = prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")))
             .addSort("_score", SortOrder.DESC)
             .get();
         assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1"));
@@ -449,8 +431,7 @@ public void testScoreSortDirectionWithFunctionScore() throws Exception {
         assertThat(searchResponse.getHits().getAt(2).getScore(), Matchers.lessThan(searchResponse.getHits().getAt(1).getScore()));
         assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3"));
 
-        searchResponse = client().prepareSearch("test")
-            .setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")))
+        searchResponse = prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), fieldValueFactorFunction("field")))
             .addSort("_score", SortOrder.DESC)
             .get();
         assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3"));
@@ -465,11 +446,7 @@ public void testIssue2986() {
         client().prepareIndex("test").setId("2").setSource("{\"field1\":\"value2\"}", XContentType.JSON).get();
         client().prepareIndex("test").setId("3").setSource("{\"field1\":\"value3\"}", XContentType.JSON).get();
         refresh();
-        SearchResponse result = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
-            .setTrackScores(true)
-            .addSort("field1", SortOrder.ASC)
-            .get();
+        SearchResponse result = prepareSearch("test").setQuery(matchAllQuery()).setTrackScores(true).addSort("field1", SortOrder.ASC).get();
 
         for (SearchHit hit : result.getHits()) {
             assertFalse(Float.isNaN(hit.getScore()));
@@ -497,8 +474,7 @@ public void testIssue2991() {
         client().prepareIndex("test").setId("2").setSource("tag", "beta").get();
         refresh();
-        SearchResponse resp = client().prepareSearch("test")
-            .setSize(2)
+        SearchResponse resp = prepareSearch("test").setSize(2)
             .setQuery(matchAllQuery())
             .addSort(SortBuilders.fieldSort("tag").order(SortOrder.ASC))
             .get();
@@ -507,8 +483,7 @@ public void testIssue2991() {
         assertFirstHit(resp, hasId("1"));
         assertSecondHit(resp, hasId("2"));
-        resp = client().prepareSearch("test")
-            .setSize(2)
+        resp = prepareSearch("test").setSize(2)
             .setQuery(matchAllQuery())
             .addSort(SortBuilders.fieldSort("tag").order(SortOrder.DESC))
             .get();
@@ -592,11 +567,7 @@ public void testSimpleSorts() throws Exception {
 
         // STRING
         int size = 1 + random.nextInt(10);
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
-            .setSize(size)
-            .addSort("str_value", SortOrder.ASC)
-            .get();
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.ASC).get();
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
         for (int i = 0; i < size; i++) {
@@ -607,7 +578,7 @@ public void testSimpleSorts() throws Exception {
             );
         }
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("str_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -623,7 +594,7 @@ public void testSimpleSorts() throws Exception {
 
         // BYTE
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -632,7 +603,7 @@ public void testSimpleSorts() throws Exception {
             assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).byteValue(), equalTo((byte) i));
         }
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("byte_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -645,7 +616,7 @@ public void testSimpleSorts() throws Exception {
 
         // SHORT
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -654,7 +625,7 @@ public void testSimpleSorts() throws Exception {
             assertThat(((Number) searchResponse.getHits().getAt(i).getSortValues()[0]).shortValue(), equalTo((short) i));
         }
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("short_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -667,7 +638,7 @@ public void testSimpleSorts() throws Exception {
 
         // INTEGER
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -678,7 +649,7 @@ public void testSimpleSorts() throws Exception {
 
         assertThat(searchResponse.toString(), not(containsString("error")));
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("integer_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -691,7 +662,7 @@ public void testSimpleSorts() throws Exception {
 
         // LONG
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -702,7 +673,7 @@ public void testSimpleSorts() throws Exception {
 
         assertThat(searchResponse.toString(), not(containsString("error")));
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("long_value", SortOrder.DESC).get();
         assertHitCount(searchResponse, 10L);
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -715,7 +686,7 @@ public void testSimpleSorts() throws Exception {
 
         // FLOAT
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10L);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -726,7 +697,7 @@ public void testSimpleSorts() throws Exception {
 
         assertThat(searchResponse.toString(), not(containsString("error")));
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -739,7 +710,7 @@ public void testSimpleSorts() throws Exception {
 
         // DOUBLE
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC).get();
 
         assertHitCount(searchResponse, 10L);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -750,7 +721,7 @@ public void testSimpleSorts() throws Exception {
 
         assertThat(searchResponse.toString(), not(containsString("error")));
         size = 1 + random.nextInt(10);
-        searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC).get();
+        searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC).get();
 
         assertHitCount(searchResponse, 10L);
         assertThat(searchResponse.getHits().getHits().length, equalTo(size));
@@ -797,8 +768,7 @@ public void testSortMissingNumbers() throws Exception {
         refresh();
 
         logger.info("--> sort with no missing (same as missing _last)");
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC))
             .get();
         assertNoFailures(searchResponse);
@@ -809,8 +779,7 @@ public void testSortMissingNumbers() throws Exception {
         assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2"));
 
         logger.info("--> sort with missing _last");
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
.addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")) .get(); assertNoFailures(searchResponse); @@ -821,8 +790,7 @@ public void testSortMissingNumbers() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); logger.info("--> sort with missing _first"); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")) .get(); assertNoFailures(searchResponse); @@ -872,8 +840,7 @@ public void testSortMissingStrings() throws IOException { } logger.info("--> sort with no missing (same as missing _last)"); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)) .get(); assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); @@ -884,8 +851,7 @@ public void testSortMissingStrings() throws IOException { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); logger.info("--> sort with missing _last"); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")) .get(); assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); @@ -896,8 +862,7 @@ public void testSortMissingStrings() throws IOException { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("2")); logger.info("--> sort with missing _first"); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")) .get(); assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); @@ -908,8 +873,7 @@ public void testSortMissingStrings() throws IOException { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo("3")); logger.info("--> sort with missing b"); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("b")) .get(); assertThat(Arrays.toString(searchResponse.getShardFailures()), searchResponse.getFailedShards(), equalTo(0)); @@ -950,24 +914,23 @@ public void testSortMissingDates() throws IOException { format = type.equals("date") ? 
"strict_date_optional_time" : "strict_date_optional_time_nanos"; } - SearchResponse searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format)) - .get(); + SearchResponse searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format) + ).get(); assertHitsInOrder(searchResponse, new String[] { "1", "2", "3" }); - searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format)) - .get(); + searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format) + ).get(); assertHitsInOrder(searchResponse, new String[] { "3", "1", "2" }); - searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format)) + searchResponse = prepareSearch(index).addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format)) .get(); assertHitsInOrder(searchResponse, new String[] { "2", "1", "3" }); - searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format)) - .get(); + searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format) + ).get(); assertHitsInOrder(searchResponse, new String[] { "3", "2", "1" }); } } @@ -1012,32 +975,24 @@ public void testSortMissingDatesMixedTypes() throws IOException { } String index = "test*"; - SearchResponse searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format).setNumericType("date_nanos")) - .addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)) - .get(); + SearchResponse searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); assertHitsInOrder(searchResponse, new String[] { "1", "2", "4", "5", "3", "6" }); - searchResponse = client().prepareSearch(index) - .addSort( - SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format).setNumericType("date_nanos") - ) - .addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)) - .get(); + searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.ASC).missing("_first").setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); assertHitsInOrder(searchResponse, new String[] { "3", "6", "1", "2", "4", "5" }); - searchResponse = client().prepareSearch(index) - .addSort(SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format).setNumericType("date_nanos")) - .addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)) - .get(); + searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); assertHitsInOrder(searchResponse, new String[] { "5", "4", "2", "1", "3", "6" }); - searchResponse = client().prepareSearch(index) - .addSort( - 
SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format).setNumericType("date_nanos") - ) - .addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)) - .get(); + searchResponse = prepareSearch(index).addSort( + SortBuilders.fieldSort("mydate").order(SortOrder.DESC).missing("_first").setFormat(format).setNumericType("date_nanos") + ).addSort(SortBuilders.fieldSort("other_field").order(SortOrder.ASC)).get(); assertHitsInOrder(searchResponse, new String[] { "3", "6", "5", "4", "2", "1" }); } } @@ -1062,7 +1017,7 @@ public void testIgnoreUnmapped() throws Exception { logger.info("--> sort with an unmapped field, verify it fails"); try { - SearchResponse result = client().prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk")).get(); + SearchResponse result = prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk")).get(); assertThat("Expected exception but returned with", result, nullValue()); } catch (SearchPhaseExecutionException e) { // we check that it's a parse failure rather than a different shard failure @@ -1071,12 +1026,11 @@ public void testIgnoreUnmapped() throws Exception { } } - assertNoFailures(client().prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk").unmappedType("keyword"))); + assertNoFailures(prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("kkk").unmappedType("keyword"))); // nested field assertNoFailures( - client().prepareSearch() - .setQuery(matchAllQuery()) + prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("nested.foo") .unmappedType("keyword") @@ -1086,8 +1040,7 @@ public void testIgnoreUnmapped() throws Exception { // nestedQuery assertNoFailures( - client().prepareSearch() - .setQuery(matchAllQuery()) + prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("nested.foo") .unmappedType("keyword") @@ -1176,11 +1129,7 @@ public void testSortMVField() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(10) - .addSort("long_values", SortOrder.ASC) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1194,7 +1143,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(7L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1208,8 +1157,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(3L)); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(10) 
.addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)) .get(); @@ -1226,8 +1174,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L)); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)) .get(); @@ -1244,8 +1191,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)) .get(); @@ -1262,7 +1208,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1276,7 +1222,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1290,7 +1236,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1304,7 +1250,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC).get(); 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1318,7 +1264,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1332,7 +1278,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1346,7 +1292,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1360,7 +1306,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1374,7 +1320,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1388,7 +1334,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) 
searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1402,7 +1348,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1416,7 +1362,7 @@ public void testSortMVField() throws Exception { assertThat(searchResponse.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(searchResponse.getHits().getAt(2).getSortValues()[0], equalTo("07")); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1453,11 +1399,7 @@ public void testSortOnRareField() throws IOException { .get(); refresh(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(3) - .addSort("string_values", SortOrder.DESC) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); @@ -1476,7 +1418,7 @@ public void testSortOnRareField() throws IOException { } refresh(); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort("string_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(2).addSort("string_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); @@ -1498,7 +1440,7 @@ public void testSortOnRareField() throws IOException { } refresh(); - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1519,7 +1461,7 @@ public void testSortOnRareField() throws IOException { refresh(); } - searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC).get(); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); @@ -1546,8 +1488,7 @@ public void testSortMetaField() throws Exception { indexRandom(true, indexReqs); SortOrder order = 
randomFrom(SortOrder.values()); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(randomIntBetween(1, numDocs + 5)) .addSort("_id", order) .get(); @@ -1647,8 +1588,7 @@ public void testNestedSort() throws IOException, InterruptedException, Execution // We sort on nested field - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("nested.foo").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)) .get(); assertNoFailures(searchResponse); @@ -1660,8 +1600,7 @@ public void testNestedSort() throws IOException, InterruptedException, Execution assertThat(hits[1].getSortValues()[0], is("bar")); // We sort on nested fields with max_children limit - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("nested.foo").setNestedSort(new NestedSortBuilder("nested").setMaxChildren(1)).order(SortOrder.DESC) ) @@ -1677,8 +1616,7 @@ public void testNestedSort() throws IOException, InterruptedException, Execution { SearchPhaseExecutionException exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch() - .setQuery(matchAllQuery()) + () -> prepareSearch().setQuery(matchAllQuery()) .addSort( SortBuilders.fieldSort("nested.bar.foo") .setNestedSort( @@ -1692,8 +1630,7 @@ public void testNestedSort() throws IOException, InterruptedException, Execution } // We sort on nested sub field - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort("nested.foo.sub").setNestedSort(new NestedSortBuilder("nested")).order(SortOrder.DESC)) .get(); assertNoFailures(searchResponse); @@ -1707,7 +1644,7 @@ public void testNestedSort() throws IOException, InterruptedException, Execution // missing nested path SearchPhaseExecutionException exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("nested.foo")).get() + () -> prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("nested.foo")).get() ); assertThat(exc.toString(), containsString("it is mandatory to set the [nested] context")); } @@ -1717,12 +1654,11 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception assertAcked( prepareCreate("test1").setSettings( Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(2, maximumNumberOfShards())) - ).setMapping(sortField, "type=long").get() + ).setMapping(sortField, "type=long") ); assertAcked( prepareCreate("test2").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)) .setMapping(sortField, "type=long") - .get() ); for (String index : new String[] { "test1", "test2" }) { @@ -1737,9 +1673,9 @@ public void testSortDuelBetweenSingleShardAndMultiShardIndex() throws Exception SortOrder order = randomBoolean() ? 
SortOrder.ASC : SortOrder.DESC; int from = between(0, 256); int size = between(0, 256); - SearchResponse multiShardResponse = client().prepareSearch("test1").setFrom(from).setSize(size).addSort(sortField, order).get(); + SearchResponse multiShardResponse = prepareSearch("test1").setFrom(from).setSize(size).addSort(sortField, order).get(); assertNoFailures(multiShardResponse); - SearchResponse singleShardResponse = client().prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order).get(); + SearchResponse singleShardResponse = prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order).get(); assertNoFailures(singleShardResponse); assertThat(multiShardResponse.getHits().getTotalHits().value, equalTo(singleShardResponse.getHits().getTotalHits().value)); @@ -1764,14 +1700,14 @@ public void testCustomFormat() throws Exception { client().prepareIndex("test").setId("2").setSource("ip", "2001:db8::ff00:42:8329") ); - SearchResponse response = client().prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).get(); + assertNoFailures(response); assertEquals(2, response.getHits().getTotalHits().value); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); - response = client().prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }).get(); - assertSearchResponse(response); + response = prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }).get(); + assertNoFailures(response); assertEquals(2, response.getHits().getTotalHits().value); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); @@ -1792,8 +1728,7 @@ public void testScriptFieldSort() throws Exception { { Script script = new Script(ScriptType.INLINE, NAME, "doc['number'].value", Collections.emptyMap()); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(randomIntBetween(1, numDocs + 5)) .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.NUMBER)) .addSort(SortBuilders.scoreSort()) @@ -1809,8 +1744,7 @@ public void testScriptFieldSort() throws Exception { { Script script = new Script(ScriptType.INLINE, NAME, "doc['keyword'].value", Collections.emptyMap()); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(randomIntBetween(1, numDocs + 5)) .addSort(SortBuilders.scriptSort(script, ScriptSortBuilder.ScriptSortType.STRING)) .addSort(SortBuilders.scoreSort()) @@ -1838,8 +1772,7 @@ public void testFieldAlias() throws Exception { builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(builders.size()) .addSort(SortBuilders.fieldSort("route_length_miles")) .get(); @@ -1864,8 +1797,7 @@ public void testFieldAliasesWithMissingValues() throws Exception { 
builders.add(client().prepareIndex("new_index").setSource("route_length_miles", 100.2)); indexRandom(true, true, builders); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(builders.size()) .addSort(SortBuilders.fieldSort("route_length_miles").missing(120.3)) .get(); @@ -1890,8 +1822,7 @@ public void testCastNumericType() throws Exception { indexRandom(true, true, builders); { - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(builders.size()) .addSort(SortBuilders.fieldSort("field").setNumericType("long")) .get(); @@ -1907,8 +1838,7 @@ public void testCastNumericType() throws Exception { } { - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(builders.size()) .addSort(SortBuilders.fieldSort("field").setNumericType("double")) .get(); @@ -1934,8 +1864,7 @@ public void testCastDate() throws Exception { indexRandom(true, true, builders); { - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(2) .addSort(SortBuilders.fieldSort("field").setNumericType("date")) .get(); @@ -1948,8 +1877,7 @@ public void testCastDate() throws Exception { assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]); assertEquals(1712879237000L, hits.getAt(1).getSortValues()[0]); - response = client().prepareSearch() - .setMaxConcurrentShardRequests(1) + response = prepareSearch().setMaxConcurrentShardRequests(1) .setQuery(matchAllQuery()) .setSize(1) .addSort(SortBuilders.fieldSort("field").setNumericType("date")) @@ -1960,8 +1888,7 @@ public void testCastDate() throws Exception { assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class)); assertEquals(1712879236854L, hits.getAt(0).getSortValues()[0]); - response = client().prepareSearch() - .setMaxConcurrentShardRequests(1) + response = prepareSearch().setMaxConcurrentShardRequests(1) .setQuery(matchAllQuery()) .setSize(1) .addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date")) @@ -1974,8 +1901,7 @@ public void testCastDate() throws Exception { } { - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(2) .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")) .get(); @@ -1987,8 +1913,7 @@ public void testCastDate() throws Exception { assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]); assertEquals(1712879237000000000L, hits.getAt(1).getSortValues()[0]); - response = client().prepareSearch() - .setMaxConcurrentShardRequests(1) + response = prepareSearch().setMaxConcurrentShardRequests(1) .setQuery(matchAllQuery()) .setSize(1) .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")) @@ -1998,8 +1923,7 @@ public void testCastDate() throws Exception { assertThat(hits.getAt(0).getSortValues()[0].getClass(), equalTo(Long.class)); assertEquals(1712879236854775807L, hits.getAt(0).getSortValues()[0]); - response = client().prepareSearch() - .setMaxConcurrentShardRequests(1) + response = prepareSearch().setMaxConcurrentShardRequests(1) .setQuery(matchAllQuery()) .setSize(1) 
.addSort(SortBuilders.fieldSort("field").order(SortOrder.DESC).setNumericType("date_nanos")) @@ -2014,8 +1938,7 @@ public void testCastDate() throws Exception { builders.clear(); builders.add(client().prepareIndex("index_date").setSource("field", "1905-04-11T23:47:17")); indexRandom(true, true, builders); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setSize(1) .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")) .get(); @@ -2028,8 +1951,7 @@ public void testCastDate() throws Exception { builders.clear(); builders.add(client().prepareIndex("index_date").setSource("field", "2346-04-11T23:47:17")); indexRandom(true, true, builders); - SearchResponse response = client().prepareSearch() - .setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01")) + SearchResponse response = prepareSearch().setQuery(QueryBuilders.rangeQuery("field").gt("1970-01-01")) .setSize(10) .addSort(SortBuilders.fieldSort("field").setNumericType("date_nanos")) .get(); @@ -2046,8 +1968,7 @@ public void testCastNumericTypeExceptions() throws Exception { for (String numericType : new String[] { "long", "double", "date", "date_nanos" }) { ElasticsearchException exc = expectThrows( ElasticsearchException.class, - () -> client().prepareSearch() - .setQuery(matchAllQuery()) + () -> prepareSearch().setQuery(matchAllQuery()) .addSort(SortBuilders.fieldSort(invalidField).setNumericType(numericType)) .get() ); @@ -2061,7 +1982,6 @@ public void testLongSortOptimizationCorrectResults() { assertAcked( prepareCreate("test1").setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2)) .setMapping("long_field", "type=long") - .get() ); BulkRequestBuilder bulkBuilder = client().prepareBulk(); @@ -2076,11 +1996,8 @@ public void testLongSortOptimizationCorrectResults() { refresh(); // *** 1. sort DESC on long_field - SearchResponse searchResponse = client().prepareSearch() - .addSort(new FieldSortBuilder("long_field").order(SortOrder.DESC)) - .setSize(10) - .get(); - assertSearchResponse(searchResponse); + SearchResponse searchResponse = prepareSearch().addSort(new FieldSortBuilder("long_field").order(SortOrder.DESC)).setSize(10).get(); + assertNoFailures(searchResponse); long previousLong = Long.MAX_VALUE; for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { // check the correct sort order @@ -2091,8 +2008,8 @@ public void testLongSortOptimizationCorrectResults() { } // *** 2. 
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
index 49d5bebfdf4a3..777db15b596ec 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceIT.java
@@ -148,8 +148,7 @@ public void testDistanceSortingMVFields() throws Exception {
         indicesAdmin().prepareRefresh().get();
         // Order: Asc
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC))
             .get();
@@ -162,8 +161,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
         // Order: Asc, Mode: max
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC).sortMode(SortMode.MAX))
             .get();
@@ -176,8 +174,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
         // Order: Desc
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC))
             .get();
@@ -190,8 +187,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
         // Order: Desc, Mode: min
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC).sortMode(SortMode.MIN))
             .get();
@@ -203,8 +199,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.ASC))
             .get();
@@ -216,8 +211,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2874d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(5301d, 10d));
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.AVG).order(SortOrder.DESC))
             .get();
@@ -230,8 +224,7 @@ public void testDistanceSortingMVFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(4).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
         try {
-            client().prepareSearch("test")
-                .setQuery(matchAllQuery())
+            prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).sortMode(SortMode.SUM));
             fail("sum should not be supported for sorting by geo distance");
         } catch (IllegalArgumentException e) {
@@ -283,8 +276,7 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception {
         refresh();
         // Order: Asc
-        SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC))
             .get();
@@ -294,8 +286,7 @@ public void testDistanceSortingWithMissingGeoPoint() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
         // Order: Desc
-        searchResponse = client().prepareSearch("test")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC))
             .get();
@@ -425,8 +416,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         );
         // Order: Asc
-        SearchResponse searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .order(SortOrder.ASC)
@@ -442,8 +432,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
         // Order: Asc, Mode: max
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .order(SortOrder.ASC)
@@ -460,8 +449,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
         // Order: Desc
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .order(SortOrder.DESC)
@@ -477,8 +465,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
         // Order: Desc, Mode: min
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .order(SortOrder.DESC)
@@ -494,8 +481,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .sortMode(SortMode.AVG)
@@ -511,8 +497,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(2874.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(5301.0d, 10d));
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .setNestedSort(new NestedSortBuilder("branches"))
@@ -528,8 +513,7 @@ public void testDistanceSortingNestedFields() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(2).getSortValues()[0]).doubleValue(), closeTo(1157.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        searchResponse = client().prepareSearch("companies")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("companies").setQuery(matchAllQuery())
             .addSort(
                 SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                     .setNestedSort(new NestedSortBuilder("branches").setFilter(termQuery("branches.name", "brooklyn")))
@@ -546,8 +530,7 @@
         assertThat(((Number) searchResponse.getHits().getAt(3).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
         try {
-            client().prepareSearch("companies")
-                .setQuery(matchAllQuery())
+            prepareSearch("companies").setQuery(matchAllQuery())
                 .addSort(
                     SortBuilders.geoDistanceSort("branches.location", 40.7143528, -74.0059731)
                         .sortMode(SortMode.SUM)
@@ -584,8 +567,7 @@ public void testGeoDistanceFilter() throws IOException {
         client().prepareGet("locations", "1").get();
         assertHitCount(
-            client().prepareSearch("locations")
-                .setQuery(QueryBuilders.matchAllQuery())
+            prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery())
                 .setPostFilter(QueryBuilders.geoDistanceQuery("pin").geoDistance(GeoDistance.ARC).point(lat, lon).distance("1m")),
             1
         );
@@ -632,8 +614,7 @@ public void testDistanceSortingWithUnmappedField() throws Exception {
         refresh();
         // Order: Asc
-        SearchResponse searchResponse = client().prepareSearch("test1", "test2")
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.ASC))
             .get();
@@ -643,8 +624,7 @@ public void testDistanceSortingWithUnmappedField() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), equalTo(Double.POSITIVE_INFINITY));
         // Order: Desc
-        searchResponse = client().prepareSearch("test1", "test2")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).ignoreUnmapped(true).order(SortOrder.DESC))
             .get();
@@ -655,8 +635,7 @@ public void testDistanceSortingWithUnmappedField() throws Exception {
         assertThat(((Number) searchResponse.getHits().getAt(1).getSortValues()[0]).doubleValue(), closeTo(5286d, 10d));
         // Make sure that by default the unmapped fields continue to fail
-        searchResponse = client().prepareSearch("test1", "test2")
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch("test1", "test2").setQuery(matchAllQuery())
             .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.DESC))
             .get();
         assertThat(searchResponse.getFailedShards(), greaterThan(0));
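[Editorial note] As the GeoDistanceIT hunks above show, only the head of each call chain changes; the request that is built and executed is identical before and after. A hedged sketch of that equivalence follows; the class name and index/field values are illustrative only, not taken from this diff.

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;

// Illustrative sketch: both styles build and run the same search request.
public class PrepareSearchEquivalenceSketchIT extends ESIntegTestCase {

    public void testOldAndNewStylesBuildTheSameRequest() {
        createIndex("test");
        // Old style: name the client explicitly.
        SearchResponse before = client().prepareSearch("test")
            .setQuery(matchAllQuery())
            .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC))
            .get();
        // New style: the inherited helper; everything after the head of the chain is untouched.
        SearchResponse after = prepareSearch("test").setQuery(matchAllQuery())
            .addSort(SortBuilders.geoDistanceSort("locations", 40.7143528, -74.0059731).order(SortOrder.ASC))
            .get();
        assertEquals(before.getHits().getTotalHits().value, after.getHits().getTotalHits().value);
    }
}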
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java
index 0862d919843db..54d730cec2bc3 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java
@@ -84,8 +84,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce
             q[0] = new GeoPoint(2, 1);
         }
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
@@ -98,8 +97,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce
             closeTo(GeoDistance.ARC.calculate(2, 1, 5, 1, DistanceUnit.METERS), 10d)
         );
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.DESC))
             .get();
         assertOrderedSearchHits(searchResponse, "d2", "d1");
@@ -112,8 +110,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce
             closeTo(GeoDistance.ARC.calculate(2, 2, 3, 2, DistanceUnit.METERS), 10d)
         );
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
@@ -126,8 +123,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce
             closeTo(GeoDistance.ARC.calculate(2, 1, 6, 2, DistanceUnit.METERS), 10d)
         );
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.DESC))
             .get();
         assertOrderedSearchHits(searchResponse, "d2", "d1");
@@ -168,8 +164,7 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc
         );
         GeoPoint q = new GeoPoint(0, 0);
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.AVG).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d2", "d1");
@@ -182,8 +177,7 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc
             closeTo(GeoDistance.ARC.calculate(0, 0, 0, 5, DistanceUnit.METERS), 10d)
         );
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MEDIAN).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
@@ -251,8 +245,7 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept
             }
         }
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
@@ -265,8 +258,7 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept
             closeTo(GeoDistance.ARC.calculate(4.5, 1, 2, 1, DistanceUnit.METERS), 1.e-1)
         );
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(geoDistanceSortBuilder.sortMode(SortMode.MAX).order(SortOrder.ASC))
             .get();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
@@ -297,50 +289,41 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup
         GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint);
-        SearchResponse searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC))
             .get();
         checkCorrectSortOrderForGeoSort(searchResponse);
         geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2));
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC))
             .get();
         checkCorrectSortOrderForGeoSort(searchResponse);
         geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2);
-        searchResponse = client().prepareSearch()
-            .setQuery(matchAllQuery())
+        searchResponse = prepareSearch().setQuery(matchAllQuery())
             .addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC))
             .get();
         checkCorrectSortOrderForGeoSort(searchResponse);
-        searchResponse = client().prepareSearch()
-            .setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)))
+        searchResponse = prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)))
             .get();
         checkCorrectSortOrderForGeoSort(searchResponse);
-        searchResponse = client().prepareSearch()
-            .setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0")))
-            .get();
+        searchResponse = prepareSearch().setSource(
+            new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0"))
+        ).get();
         checkCorrectSortOrderForGeoSort(searchResponse);
-        searchResponse = client().prepareSearch()
-            .setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)))
+        searchResponse = prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)))
             .get();
         checkCorrectSortOrderForGeoSort(searchResponse);
-        searchResponse = client().prepareSearch()
-            .setSource(
-                new SearchSourceBuilder().sort(
-                    SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE)
-                )
-            )
-            .get();
+        searchResponse = prepareSearch().setSource(
+            new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE))
+        ).get();
         checkCorrectSortOrderForGeoSort(searchResponse);
     }
@@ -358,9 +341,9 @@ private static void checkCorrectSortOrderForGeoSort(SearchResponse searchRespons
     public void testCrossIndexIgnoreUnmapped() throws Exception {
         assertAcked(
-            prepareCreate("test1").setMapping("str_field", "type=keyword", "long_field", "type=long", "double_field", "type=double").get()
+            prepareCreate("test1").setMapping("str_field", "type=keyword", "long_field", "type=long", "double_field", "type=double")
         );
-        assertAcked(prepareCreate("test2").get());
+        assertAcked(prepareCreate("test2"));
         indexRandom(
             true,
@@ -369,24 +352,21 @@ public void testCrossIndexIgnoreUnmapped() throws Exception {
         );
         assertSortValues(
-            client().prepareSearch("test1", "test2")
-                .addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("keyword"))
+            prepareSearch("test1", "test2").addSort(fieldSort("str_field").order(SortOrder.ASC).unmappedType("keyword"))
                 .addSort(fieldSort("str_field2").order(SortOrder.DESC).unmappedType("keyword")),
             new Object[] { "bcd", null },
             new Object[] { null, null }
         );
         assertSortValues(
-            client().prepareSearch("test1", "test2")
-                .addSort(fieldSort("long_field").order(SortOrder.ASC).unmappedType("long"))
+            prepareSearch("test1", "test2").addSort(fieldSort("long_field").order(SortOrder.ASC).unmappedType("long"))
                 .addSort(fieldSort("long_field2").order(SortOrder.DESC).unmappedType("long")),
             new Object[] { 3L, Long.MIN_VALUE },
             new Object[] { Long.MAX_VALUE, Long.MIN_VALUE }
         );
         assertSortValues(
-            client().prepareSearch("test1", "test2")
-                .addSort(fieldSort("double_field").order(SortOrder.ASC).unmappedType("double"))
+            prepareSearch("test1", "test2").addSort(fieldSort("double_field").order(SortOrder.ASC).unmappedType("double"))
                 .addSort(fieldSort("double_field2").order(SortOrder.DESC).unmappedType("double")),
             new Object[] { 0.65, Double.NEGATIVE_INFINITY },
             new Object[] { Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY }
.addSort(SortBuilders.fieldSort("ord").order(SortOrder.ASC).unmappedType("long")) .setSize(10) @@ -355,8 +350,7 @@ public void testDocumentsWithNullValue() throws Exception { Script scripField = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'].value", Collections.emptyMap()); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .addScriptField("id", scripField) .addSort("svalue", SortOrder.ASC) .get(); @@ -368,8 +362,7 @@ public void testDocumentsWithNullValue() throws Exception { assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + searchResponse = prepareSearch().setQuery(matchAllQuery()) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC) .get(); @@ -381,11 +374,7 @@ public void testDocumentsWithNullValue() throws Exception { assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); - searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .addScriptField("id", scripField) - .addSort("svalue", SortOrder.DESC) - .get(); + searchResponse = prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.DESC).get(); if (searchResponse.getFailedShards() > 0) { logger.warn("Failed shards:"); @@ -401,8 +390,7 @@ public void testDocumentsWithNullValue() throws Exception { assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); // a query with docs just with null values - searchResponse = client().prepareSearch() - .setQuery(termQuery("id", "2")) + searchResponse = prepareSearch().setQuery(termQuery("id", "2")) .addScriptField("id", scripField) .addSort("svalue", SortOrder.DESC) .get(); @@ -443,8 +431,6 @@ public void test2920() throws IOException { refresh(); Script sortScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "\u0027\u0027", Collections.emptyMap()); - assertNoFailures( - client().prepareSearch().setQuery(matchAllQuery()).addSort(scriptSort(sortScript, ScriptSortType.STRING)).setSize(10) - ); + assertNoFailures(prepareSearch().setQuery(matchAllQuery()).addSort(scriptSort(sortScript, ScriptSortType.STRING)).setSize(10)); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 710905cf3302d..1860082c833ad 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -34,12 +34,12 @@ public void testSimple() { client().prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get(); + SearchResponse response = prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true).get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); 
assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); - response = client().prepareSearch("test").storedFields("_none_").get(); + response = prepareSearch("test").storedFields("_none_").get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } @@ -50,8 +50,7 @@ public void testInnerHits() { client().prepareIndex("test").setId("1").setSource("field", "value", "nested", Collections.singletonMap("title", "foo")).get(); refresh(); - SearchResponse response = client().prepareSearch("test") - .storedFields("_none_") + SearchResponse response = prepareSearch("test").storedFields("_none_") .setFetchSource(false) .setQuery( new NestedQueryBuilder("nested", new TermQueryBuilder("nested.title", "foo"), ScoreMode.Total).innerHit( @@ -77,12 +76,12 @@ public void testWithRouting() { client().prepareIndex("test").setId("1").setSource("field", "value").setRouting("toto").get(); refresh(); - SearchResponse response = client().prepareSearch("test").storedFields("_none_").setFetchSource(false).get(); + SearchResponse response = prepareSearch("test").storedFields("_none_").setFetchSource(false).get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).field("_routing"), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - response = client().prepareSearch("test").storedFields("_none_").get(); + response = prepareSearch("test").storedFields("_none_").get(); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); } @@ -97,7 +96,7 @@ public void testInvalid() { { SearchPhaseExecutionException exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test").setFetchSource(true).storedFields("_none_").get() + () -> prepareSearch("test").setFetchSource(true).storedFields("_none_").get() ); Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); @@ -107,7 +106,7 @@ public void testInvalid() { { SearchPhaseExecutionException exc = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch("test").storedFields("_none_").addFetchField("field").get() + () -> prepareSearch("test").storedFields("_none_").addFetchField("field").get() ); Throwable rootCause = ExceptionsHelper.unwrap(exc, SearchException.class); assertNotNull(rootCause); @@ -117,14 +116,14 @@ public void testInvalid() { { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("test").storedFields("_none_", "field1").setVersion(true).get() + () -> prepareSearch("test").storedFields("_none_", "field1").setVersion(true).get() ); assertThat(exc.getMessage(), equalTo("cannot combine _none_ with other fields")); } { IllegalArgumentException exc = expectThrows( IllegalArgumentException.class, - () -> client().prepareSearch("test").storedFields("_none_").storedFields("field1").setVersion(true).get() + () -> prepareSearch("test").storedFields("_none_").storedFields("field1").setVersion(true).get() ); assertThat(exc.getMessage(), equalTo("cannot combine _none_ with other fields")); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index ac6c1a7c2d554..3fcbc5cf4add6 100644 --- 
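[Editorial note] The MetadataFetchingIT hunks above keep asserting the "_none_" stored-fields contract while switching to the helper: "_none_" disables metadata fetching and cannot be combined with any other stored field. A compact sketch of that contract as those tests exercise it; the class name is illustrative, and the expected message is taken verbatim from the hunks above.

import org.elasticsearch.test.ESIntegTestCase;

public class StoredFieldsNoneContractSketchIT extends ESIntegTestCase {

    public void testNoneCannotBeCombined() {
        createIndex("test");
        // Combining "_none_" with a concrete field is rejected up front,
        // with the exact message the hunks above assert.
        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
            () -> prepareSearch("test").storedFields("_none_", "field1").get()
        );
        assertEquals("cannot combine _none_ with other fields", e.getMessage());
    }
}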
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java
index ac6c1a7c2d554..3fcbc5cf4add6 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java
@@ -23,13 +23,13 @@ public void testSourceDefaultBehavior() {
         indexDoc("test", "1", "field", "value");
         refresh();
-        SearchResponse response = client().prepareSearch("test").get();
+        SearchResponse response = prepareSearch("test").get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
-        response = client().prepareSearch("test").addStoredField("bla").get();
+        response = prepareSearch("test").addStoredField("bla").get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
-        response = client().prepareSearch("test").addStoredField("_source").get();
+        response = prepareSearch("test").addStoredField("_source").get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
     }
@@ -41,22 +41,22 @@ public void testSourceFiltering() {
         client().prepareIndex("test").setId("1").setSource("field1", "value", "field2", "value2").get();
         refresh();
-        SearchResponse response = client().prepareSearch("test").setFetchSource(false).get();
+        SearchResponse response = prepareSearch("test").setFetchSource(false).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue());
-        response = client().prepareSearch("test").setFetchSource(true).get();
+        response = prepareSearch("test").setFetchSource(true).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
-        response = client().prepareSearch("test").setFetchSource("field1", null).get();
+        response = prepareSearch("test").setFetchSource("field1", null).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
         assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1));
         assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value"));
-        response = client().prepareSearch("test").setFetchSource("hello", null).get();
+        response = prepareSearch("test").setFetchSource("hello", null).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
         assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0));
-        response = client().prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }).get();
+        response = prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
         assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1));
         assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value"));
@@ -74,12 +74,12 @@ public void testSourceWithWildcardFiltering() {
         client().prepareIndex("test").setId("1").setSource("field", "value").get();
         refresh();
-        SearchResponse response = client().prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null).get();
+        SearchResponse response = prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
         assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1));
         assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value"));
-        response = client().prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null).get();
+        response = prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null).get();
         assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue());
         assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1));
         assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value"));
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java
index 1e9b4fdbd7cd9..32f5e14b944a2 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/search/stats/FieldUsageStatsIT.java
@@ -73,8 +73,7 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio
         assertFalse(stats.hasField("field2"));
         assertFalse(stats.hasField("date_field"));
-        SearchResponse searchResponse = client().prepareSearch()
-            .setSearchType(SearchType.DEFAULT)
+        SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DEFAULT)
             .setQuery(QueryBuilders.termQuery("field", "value"))
             .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword"))
             .addAggregation(AggregationBuilders.filter("agg2", QueryBuilders.spanTermQuery("field2", "value2")))
@@ -114,8 +113,7 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio
         assertEquals(Set.of(UsageContext.DOC_VALUES), stats.get("field.keyword").keySet());
         assertEquals(1L * numShards, stats.get("field.keyword").getDocValues());
-        client().prepareSearch()
-            .setSearchType(SearchType.DEFAULT)
+        prepareSearch().setSearchType(SearchType.DEFAULT)
             .setQuery(QueryBuilders.termQuery("field", "value"))
             .addAggregation(AggregationBuilders.terms("agg1").field("field.keyword"))
             .setSize(0)
@@ -144,8 +142,7 @@ public void testFieldUsageStats() throws ExecutionException, InterruptedExceptio
                 .getTotal()
                 .getQueryCount()
         );
-        client().prepareSearch()
-            .setSearchType(SearchType.DEFAULT)
+        prepareSearch().setSearchType(SearchType.DEFAULT)
             .setPreFilterShardSize(1)
             .setQuery(QueryBuilders.rangeQuery("date_field").from("2016/01/01"))
             .setSize(100)
searchResponse = prepareSearch().setQuery(matchAllQuery()) .setSize(size) .setScroll(TimeValue.timeValueMinutes(2)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); // refresh the stats now that scroll contexts are opened indicesStats = indicesAdmin().prepareStats(index).get(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 93a2000cda7a4..9592d3904a90d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -159,19 +159,17 @@ public void testTextAndGlobalText() throws Exception { } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder noText = SuggestBuilders.completionSuggestion(FIELD); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", noText).setGlobalText("sugg")) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", noText).setGlobalText("sugg") + ).get(); assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); CompletionSuggestionBuilder withText = SuggestBuilders.completionSuggestion(FIELD).text("sugg"); - searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", withText)).get(); + searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", withText)).get(); assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); // test that suggestion text takes precedence over global text - searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", withText).setGlobalText("bogus")) - .get(); + searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", withText).setGlobalText("bogus")).get(); assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); } @@ -280,7 +278,7 @@ public void testSuggestDocument() throws Exception { indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs); - SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)).get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)).get(); CompletionSuggestion completionSuggestion = searchResponse.getSuggest().getSuggestion("foo"); CompletionSuggestion.Entry options = completionSuggestion.getEntries().get(0); assertThat(options.getOptions().size(), equalTo(numDocs)); @@ -316,8 +314,7 @@ public void testSuggestDocumentNoSource() throws Exception { indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", prefix)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)) .setFetchSource(false) .get(); CompletionSuggestion 
completionSuggestion = searchResponse.getSuggest().getSuggestion("foo"); @@ -357,8 +354,7 @@ public void testSuggestDocumentSourceFiltering() throws Exception { indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").size(numDocs); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", prefix)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)) .setFetchSource("a", "b") .get(); CompletionSuggestion completionSuggestion = searchResponse.getSuggest().getSuggestion("foo"); @@ -385,8 +381,7 @@ public void testSuggestEmptyIndex() throws IOException { createIndexAndMapping(mapping); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("v"); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", prefix)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", prefix)) .setFetchSource("a", "b") .get(); Suggest suggest = searchResponse.getSuggest(); @@ -466,9 +461,9 @@ public void testThatWeightCanBeAString() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("testSuggestions", new CompletionSuggestionBuilder(FIELD).text("test").size(10))) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("testSuggestions", new CompletionSuggestionBuilder(FIELD).text("test").size(10)) + ).get(); assertSuggestions(searchResponse, "testSuggestions", "testing"); Suggest.Suggestion.Entry.Option option = searchResponse.getSuggest() @@ -696,11 +691,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .get(); assertThat(putMappingResponse.isAcknowledged(), is(true)); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) + ).get(); assertSuggestions(searchResponse, "suggs"); client().prepareIndex(INDEX) @@ -710,11 +703,9 @@ public void testThatUpgradeToMultiFieldsWorks() throws Exception { .get(); ensureGreen(INDEX); - SearchResponse afterReindexingResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ) - .get(); + SearchResponse afterReindexingResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("suggs", SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) + ).get(); assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); } @@ -730,19 +721,14 @@ public void testThatFuzzySuggesterWorks() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv").size(10))) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv").size(10)) + 
).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); - searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10) - ) - ) - .get(); + searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)) + ).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); } @@ -759,25 +745,15 @@ public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { refresh(); // edit distance 1 - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10) - ) - ) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10)) + ).get(); assertSuggestions(searchResponse, false, "foo"); // edit distance 2 - searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10) - ) - ) - .get(); + searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10)) + ).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); } @@ -793,26 +769,17 @@ public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD) - .prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()) - .size(10) - ) + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion( + "foo", + SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", FuzzyOptions.builder().setTranspositions(false).build()).size(10) ) - .get(); + ).get(); assertSuggestions(searchResponse, false, "foo"); - searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", Fuzziness.ONE).size(10) - ) - ) - .get(); + searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", Fuzziness.ONE).size(10)) + ).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); } @@ -828,28 +795,20 @@ public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD) - .prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()) - .size(10) - ) + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion( + "foo", + SuggestBuilders.completionSuggestion(FIELD).prefix("Nriva", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ) - .get(); + ).get(); assertSuggestions(searchResponse, false, "foo"); - searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - 
SuggestBuilders.completionSuggestion(FIELD) - .prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()) - .size(10) - ) + searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion( + "foo", + SuggestBuilders.completionSuggestion(FIELD).prefix("Nrivan", FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10) ) - .get(); + ).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); } @@ -865,28 +824,20 @@ public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { refresh(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD) - .prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()) - .size(10) - ) + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion( + "foo", + SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ) - .get(); + ).get(); assertSuggestions(searchResponse, false, "foo"); - searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion( - "foo", - SuggestBuilders.completionSuggestion(FIELD) - .prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()) - .size(10) - ) + searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion( + "foo", + SuggestBuilders.completionSuggestion(FIELD).prefix("Nirvo", FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10) ) - .get(); + ).get(); assertSuggestions(searchResponse, false, "foo", "Nirvana"); } @@ -906,8 +857,7 @@ public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { .prefix("öööи", FuzzyOptions.builder().setUnicodeAware(true).build()) .size(10); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)) .get(); assertSuggestions(searchResponse, false, "foo", "ööööö"); @@ -915,18 +865,14 @@ public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD) .prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()) .size(10); - searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)) - .get(); + searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).get(); assertSuggestions(searchResponse, false, "foo"); // increasing edit distance instead of unicode awareness works again, as this is only a single character completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD) .prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).setFuzziness(Fuzziness.TWO).build()) .size(10); - searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)) - .get(); + searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).get(); assertSuggestions(searchResponse, false, "foo", "ööööö"); } @@ -967,12 +913,11 @@ public void testThatStatsAreWorking() throws Exception { refresh(); ensureGreen(); // load the fst index into ram - client().prepareSearch(INDEX) - 
.suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f"))) - .get(); - client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f"))) + prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(FIELD).prefix("f"))) .get(); + prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("foo", SuggestBuilders.completionSuggestion(otherField).prefix("f")) + ).get(); // Get all stats IndicesStatsResponse indicesStatsResponse = indicesAdmin().prepareStats(INDEX).setIndices(INDEX).setCompletion(true).get(); @@ -1020,7 +965,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD)).get() + () -> prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD)).get() ); assertThat(e.status().getStatus(), is(400)); assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]")); @@ -1143,15 +1088,14 @@ public void testSkipDuplicates() throws Exception { .skipDuplicates(true) .size(numUnique); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion("suggestions", completionSuggestionBuilder)) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion("suggestions", completionSuggestionBuilder) + ).get(); assertSuggestions(searchResponse, true, "suggestions", expected); } public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) { - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)) .get(); assertSuggestions(searchResponse, suggestionName, suggestions); } @@ -1164,11 +1108,9 @@ public void assertSuggestions(String suggestion, String... suggestions) { public void assertSuggestionsNotInOrder(String suggestString, String... 
suggestions) { String suggestionName = RandomStrings.randomAsciiLettersOfLength(random(), 10); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest( - new SuggestBuilder().addSuggestion(suggestionName, SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10)) - ) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest( + new SuggestBuilder().addSuggestion(suggestionName, SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10)) + ).get(); assertSuggestions(searchResponse, false, suggestionName, suggestions); } @@ -1279,7 +1221,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi mapping = mapping.endObject().endObject().endObject().endObject(); assertAcked( - indicesAdmin().prepareCreate(INDEX).setSettings(Settings.builder().put(indexSettings()).put(settings)).setMapping(mapping).get() + indicesAdmin().prepareCreate(INDEX).setSettings(Settings.builder().put(indexSettings()).put(settings)).setMapping(mapping) ); } @@ -1309,7 +1251,7 @@ public void testPrunedSegments() throws IOException { refresh(); assertSuggestions("b"); - assertThat(2L, equalTo(client().prepareSearch(INDEX).setSize(0).get().getHits().getTotalHits().value)); + assertThat(2L, equalTo(prepareSearch(INDEX).setSize(0).get().getHits().getTotalHits().value)); for (IndexShardSegments seg : indicesAdmin().prepareSegments().get().getIndices().get(INDEX)) { ShardSegments[] shards = seg.shards(); for (ShardSegments shardSegments : shards) { @@ -1333,7 +1275,6 @@ public void testVeryLongInput() throws IOException { .endObject() .endObject() ) - .get() ); // can cause stack overflow without the default max_input_length String longString = replaceReservedChars(randomRealisticUnicodeOfLength(randomIntBetween(5000, 10000)), (char) 0x01); @@ -1362,7 +1303,6 @@ public void testReservedChars() throws IOException { .endObject() .endObject() ) - .get() ); // can cause stack overflow without the default max_input_length String string = "foo" + (char) 0x00 + "bar"; @@ -1400,7 +1340,6 @@ public void testIssue5930() throws IOException { .endObject() .endObject() ) - .get() ); String string = "foo bar"; client().prepareIndex(INDEX) @@ -1411,11 +1350,9 @@ public void testIssue5930() throws IOException { SearchPhaseExecutionException e = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch(INDEX) - .addAggregation( - AggregationBuilders.terms("suggest_agg").field(FIELD).collectMode(randomFrom(SubAggCollectionMode.values())) - ) - .get() + () -> prepareSearch(INDEX).addAggregation( + AggregationBuilders.terms("suggest_agg").field(FIELD).collectMode(randomFrom(SubAggCollectionMode.values())) + ).get() ); assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]")); } @@ -1492,8 +1429,7 @@ public void testSuggestOnlyExplain() throws Exception { } indexRandom(true, indexRequestBuilders); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setExplain(true) + SearchResponse searchResponse = prepareSearch(INDEX).setExplain(true) .suggest(new SuggestBuilder().addSuggestion("foo", prefix)) .get(); assertSuggestions(searchResponse, "foo", "suggestion10", "suggestion9", "suggestion8", "suggestion7", "suggestion6"); @@ -1515,7 +1451,7 @@ public void testCompletionWithCollapse() throws Exception { String index = "test"; assertAcked( - 
indicesAdmin().prepareCreate(index).setSettings(Settings.builder().put("index.number_of_shards", 2)).setMapping(mapping).get() + indicesAdmin().prepareCreate(index).setSettings(Settings.builder().put("index.number_of_shards", 2)).setMapping(mapping) ); int numDocs = 2; @@ -1528,8 +1464,7 @@ public void testCompletionWithCollapse() throws Exception { indicesAdmin().prepareRefresh(index).get(); CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(suggestField).prefix("sug").size(1); - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse searchResponse = prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()) .setFrom(1) .setSize(1) .setCollapse(new CollapseBuilder("collapse_field")) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 7b2ba7cb46770..a526781bcc3db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -619,9 +619,7 @@ public void testGeoField() throws Exception { Collections.singletonList(GeoQueryContext.builder().setGeoPoint(new GeoPoint(52.52, 13.4)).build()) ) ); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion(suggestionName, context)) - .get(); + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, context)).get(); assertEquals(searchResponse.getSuggest().size(), 1); assertEquals( @@ -671,8 +669,7 @@ public void testSkipDuplicatesWithContexts() throws Exception { } public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { - SearchResponse searchResponse = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)) + SearchResponse searchResponse = prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)) .get(); CompletionSuggestSearchIT.assertSuggestions(searchResponse, suggestionName, suggestions); } @@ -728,7 +725,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi mapping.endObject().endObject().endObject(); assertAcked( - indicesAdmin().prepareCreate(INDEX).setSettings(Settings.builder().put(indexSettings()).put(settings)).setMapping(mapping).get() + indicesAdmin().prepareCreate(INDEX).setSettings(Settings.builder().put(indexSettings()).put(settings)).setMapping(mapping) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/SuggestSearchIT.java index df3442b1cc58a..95eb0f055b830 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/SuggestSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/suggest/SuggestSearchIT.java @@ -245,7 +245,7 @@ public void testSizeOneShard() throws Exception { } refresh(); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellchecker")).get(); + SearchResponse search = prepareSearch().setQuery(matchQuery("text", "spellchecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggestion = termSuggestion("text").suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can @@ -308,12 +308,12 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut candidateGenerator("name").prefixLength(0).minWordLength(0).suggestMode("always").maxEdits(2) ).gramSize(3); { - SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0); + SearchRequestBuilder searchBuilder = prepareSearch().setSize(0); searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion)); assertRequestBuilderThrows(searchBuilder, SearchPhaseExecutionException.class); } { - SearchRequestBuilder searchBuilder = client().prepareSearch().setSize(0); + SearchRequestBuilder searchBuilder = prepareSearch().setSize(0); searchBuilder.suggest(new SuggestBuilder().setGlobalText("tetsting sugestion").addSuggestion("did_you_mean", phraseSuggestion)); assertRequestBuilderThrows(searchBuilder, SearchPhaseExecutionException.class); } @@ -329,7 +329,7 @@ public void testSimple() throws Exception { indexDoc("test", "4", "text", "abcc"); refresh(); - SearchResponse search = client().prepareSearch().setQuery(matchQuery("text", "spellcecker")).get(); + SearchResponse search = prepareSearch().setQuery(matchQuery("text", "spellcecker")).get(); assertThat("didn't ask for suggestions but got some", search.getSuggest(), nullValue()); TermSuggestionBuilder termSuggest = termSuggestion("text").suggestMode(SuggestMode.ALWAYS) // Always, otherwise the results can vary @@ -828,8 +828,7 @@ public void testShardFailures() throws IOException, InterruptedException { refresh(); // When searching on a shard with a non existing mapping, we should fail - SearchRequestBuilder request = client().prepareSearch() - .setSize(0) + SearchRequestBuilder request = prepareSearch().setSize(0) .suggest( new 
SuggestBuilder().setGlobalText("tetsting sugestion") .addSuggestion("did_you_mean", phraseSuggestion("fielddoesnotexist").maxErrors(5.0f)) @@ -837,8 +836,7 @@ public void testShardFailures() throws IOException, InterruptedException { assertRequestBuilderThrows(request, SearchPhaseExecutionException.class); // When searching on a shard which does not hold yet any document of an existing type, we should not fail - SearchResponse searchResponse = client().prepareSearch() - .setSize(0) + SearchResponse searchResponse = prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion") .addSuggestion("did_you_mean", phraseSuggestion("name").maxErrors(5.0f)) @@ -878,8 +876,7 @@ public void testEmptyShards() throws IOException, InterruptedException { ensureGreen(); // test phrase suggestion on completely empty index - SearchResponse searchResponse = client().prepareSearch() - .setSize(0) + SearchResponse searchResponse = prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion") .addSuggestion("did_you_mean", phraseSuggestion("name").maxErrors(5.0f)) @@ -897,8 +894,7 @@ public void testEmptyShards() throws IOException, InterruptedException { refresh(); // test phrase suggestion but nothing matches - searchResponse = client().prepareSearch() - .setSize(0) + searchResponse = prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion") .addSuggestion("did_you_mean", phraseSuggestion("name").maxErrors(5.0f)) @@ -914,8 +910,7 @@ public void testEmptyShards() throws IOException, InterruptedException { indexDoc("test", "1", "name", "Just testing the suggestions api"); refresh(); - searchResponse = client().prepareSearch() - .setSize(0) + searchResponse = prepareSearch().setSize(0) .suggest( new SuggestBuilder().setGlobalText("tetsting sugestion") .addSuggestion("did_you_mean", phraseSuggestion("name").maxErrors(5.0f)) @@ -1416,7 +1411,7 @@ protected Suggest searchSuggest(String suggestText, String name, SuggestionBuild } protected Suggest searchSuggest(String suggestText, int expectShardsFailed, Map> suggestions) { - SearchRequestBuilder builder = client().prepareSearch().setSize(0); + SearchRequestBuilder builder = prepareSearch().setSize(0); SuggestBuilder suggestBuilder = new SuggestBuilder(); if (suggestText != null) { suggestBuilder.setGlobalText(suggestText); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index c43f13be7d100..5c1f925bddc49 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -55,17 +55,11 @@ public void testCustomBM25Similarity() throws Exception { .execute() .actionGet(); - SearchResponse bm25SearchResponse = client().prepareSearch() - .setQuery(matchQuery("field1", "quick brown fox")) - .execute() - .actionGet(); + SearchResponse bm25SearchResponse = prepareSearch().setQuery(matchQuery("field1", "quick brown fox")).execute().actionGet(); assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); - SearchResponse booleanSearchResponse = client().prepareSearch() - .setQuery(matchQuery("field2", "quick brown fox")) - .execute() - .actionGet(); + SearchResponse booleanSearchResponse = prepareSearch().setQuery(matchQuery("field2", "quick 
brown fox")).execute().actionGet(); assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java index 96a2fe69732df..d68301a310722 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/ConcurrentSnapshotsIT.java @@ -941,7 +941,7 @@ public void testQueuedSnapshotsWaitingForShardReady() throws Exception { indexDoc(testIndex, Integer.toString(i), "foo", "bar" + i); } refresh(); - assertThat(client().prepareSearch(testIndex).setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); + assertThat(prepareSearch(testIndex).setSize(0).get().getHits().getTotalHits().value, equalTo(100L)); logger.info("--> start relocations"); allowNodes(testIndex, 1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index bf7292554bf53..e188c11125c42 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -247,7 +247,6 @@ public void testRestoreIndexWithMissingShards() throws Exception { assertAcked( prepareCreate("test-idx-none", 1, indexSettingsNoReplicas(6).put("index.routing.allocation.include.tag", "nowhere")) .setWaitForActiveShards(ActiveShardCount.NONE) - .get() ); assertTrue(indexExists("test-idx-none")); @@ -1095,14 +1094,12 @@ public void testSnapshotDeleteRelocatingPrimaryIndex() throws Exception { // Drop all file chunk requests so that below relocation takes forever and we're guaranteed to run the snapshot in parallel to it for (String nodeName : dataNodes) { - ((MockTransportService) internalCluster().getInstance(TransportService.class, nodeName)).addSendBehavior( - (connection, requestId, action, request, options) -> { - if (PeerRecoveryTargetService.Actions.FILE_CHUNK.equals(action)) { - return; - } - connection.sendRequest(requestId, action, request, options); + MockTransportService.getInstance(nodeName).addSendBehavior((connection, requestId, action, request, options) -> { + if (PeerRecoveryTargetService.Actions.FILE_CHUNK.equals(action)) { + return; } - ); + connection.sendRequest(requestId, action, request, options); + }); } logger.info("--> start relocations"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java index ac8e1f67cd9c1..d171dd2c89c78 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/MetadataLoadingDuringSnapshotRestoreIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; -import 
org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoryMetadata; @@ -193,7 +192,7 @@ public Metadata getSnapshotGlobalMetadata(SnapshotId snapshotId) { public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId indexId) throws IOException { indicesMetadata.computeIfAbsent(key(snapshotId.getName(), indexId.getName()), (s) -> new AtomicInteger(0)).incrementAndGet(); - return super.getSnapshotIndexMetaData(PlainActionFuture.get(this::getRepositoryData), snapshotId, indexId); + return super.getSnapshotIndexMetaData(AbstractSnapshotIntegTestCase.getRepositoryData(this), snapshotId, indexId); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java index 5f4c270f69348..2005d63ab6413 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoriesIT.java @@ -257,7 +257,6 @@ public void testRepositoryConflict() throws Exception { .put("random", randomAlphaOfLength(10)) .put("wait_after_unblock", 200) ) - .get() ); logger.info("--> snapshot"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java index c541d157b3c63..cd34f68471156 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RestoreSnapshotIT.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.blobstore.FileRestoreContext; @@ -336,7 +337,6 @@ public void testRestoreAliases() throws Exception { .addAlias("test-idx-2", "alias-123") .addAlias("test-idx-3", "alias-123") .addAlias("test-idx-1", "alias-1") - .get() ); assertFalse(indicesAdmin().prepareGetAliases("alias-123").get().getAliases().isEmpty()); @@ -397,7 +397,7 @@ public void testRestoreTemplates() throws Exception { createRepository("test-repo", "fs"); logger.info("--> creating test template"); - assertThat( + assertAcked( indicesAdmin().preparePutTemplate("test-template") .setPatterns(Collections.singletonList("te*")) .setMapping( @@ -417,9 +417,6 @@ public void testRestoreTemplates() throws Exception { .endObject() .endObject() ) - .get() - .isAcknowledged(), - equalTo(true) ); createSnapshot("test-repo", "test-snap", Collections.emptyList()); @@ -905,7 +902,7 @@ public void testFailOnAncientVersion() throws Exception { final String repoName = "test-repo"; final Path repoPath = randomRepoPath(); createRepository(repoName, FsRepository.TYPE, repoPath); - final IndexVersion oldVersion = IndexVersion.fromId(IndexVersion.MINIMUM_COMPATIBLE.id() - 1); + final IndexVersion oldVersion = IndexVersion.fromId(IndexVersions.MINIMUM_COMPATIBLE.id() - 1); final String oldSnapshot = initWithSnapshotVersion(repoName, repoPath, oldVersion); final SnapshotRestoreException snapshotRestoreException = 
expectThrows( SnapshotRestoreException.class, @@ -917,7 +914,7 @@ public void testFailOnAncientVersion() throws Exception { "the snapshot was created with Elasticsearch version [" + oldVersion + "] which is below the current versions minimum index compatibility version [" - + IndexVersion.MINIMUM_COMPATIBLE + + IndexVersions.MINIMUM_COMPATIBLE + "]" ) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 8a617a1243ac1..7f5cacdfc935a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1615,7 +1615,7 @@ public void testRestoreSnapshotWithCorruptedGlobalState() throws Exception { assertThat(restoreSnapshotResponse.getRestoreInfo().successfulShards(), equalTo(snapshotInfo.successfulShards())); ensureGreen("test-idx-1", "test-idx-2"); - assertHitCount(client().prepareSearch("test-idx-*").setSize(0), 3); + assertHitCount(prepareSearch("test-idx-*").setSize(0), 3); } /** @@ -1684,7 +1684,7 @@ public void testRestoreSnapshotWithCorruptedIndexMetadata() throws Exception { ensureGreen(); for (Map.Entry entry : nbDocsPerIndex.entrySet()) { if (isRestorableIndex.test(entry.getKey())) { - assertHitCount(client().prepareSearch(entry.getKey()).setSize(0), entry.getValue().longValue()); + assertHitCount(prepareSearch(entry.getKey()).setSize(0), entry.getValue().longValue()); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index cdf76bea1cf04..4721b1a186a99 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -1031,11 +1031,15 @@ private void startPartialSnapshotter() { final Releasable abortReleasable = abortReleasables.transfer(); abortRunnable = mustSucceed(() -> { - logger.info("--> aborting/deleting snapshot [{}:{}]", trackedRepository.repositoryName, snapshotName); + logger.info("--> abort/delete snapshot [{}:{}] start", trackedRepository.repositoryName, snapshotName); deleteSnapshotRequestBuilder.execute(new ActionListener<>() { @Override public void onResponse(AcknowledgedResponse acknowledgedResponse) { - logger.info("--> aborted/deleted snapshot [{}:{}]", trackedRepository.repositoryName, snapshotName); + logger.info( + "--> abort/delete snapshot [{}:{}] success", + trackedRepository.repositoryName, + snapshotName + ); Releasables.close(abortReleasable); assertTrue(acknowledgedResponse.isAcknowledged()); } @@ -1046,7 +1050,7 @@ public void onFailure(Exception e) { if (ExceptionsHelper.unwrapCause(e) instanceof SnapshotMissingException) { // processed before the snapshot even started logger.info( - "--> abort/delete of [{}:{}] got snapshot missing", + "--> abort/delete snapshot [{}:{}] got snapshot missing", trackedRepository.repositoryName, snapshotName ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java new file mode 100644 index 0000000000000..d9e6a8eff5ad1 --- /dev/null +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotsServiceDoubleFinalizationIT.java @@ -0,0 +1,279 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateObserver; +import org.elasticsearch.cluster.SnapshotDeletionsInProgress; +import org.elasticsearch.cluster.SnapshotsInProgress; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.BlobStore; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.support.BlobMetadata; +import org.elasticsearch.common.blobstore.support.FilterBlobContainer; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.mockstore.BlobStoreWrapper; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.BrokenBarrierException; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class SnapshotsServiceDoubleFinalizationIT extends AbstractSnapshotIntegTestCase { + + public void testNoDoubleFinalization() throws Exception { + // 0 - Basic setup + final String masterNodeName = internalCluster().startNode(); + final String dataNodeName = internalCluster().startDataOnlyNode(); + createIndex( + "index-1", + Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", masterNodeName).build() + ); + indexRandomDocs("index-1", 50); + createIndex( + "index-2", + Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", dataNodeName).build() + ); + indexRandomDocs("index-2", 50); + createIndex( + "index-3", + Settings.builder().put("index.number_of_replicas", 0).put("index.routing.allocation.require._name", dataNodeName).build() + ); + indexRandomDocs("index-3", 50); + + // 1 - create repository and take a snapshot + final String repoName = "repo"; + createRepository(repoName, 
TestRepositoryPlugin.REPO_TYPE); + final TestRepository testRepository = getRepositoryOnMaster(repoName); + logger.info("--> create snapshot snap-1"); + createSnapshot(repoName, "snap-1", List.of("index-1")); + + // 2 - Start deleting snap-1 and block it at listing root blobs + PlainActionFuture<Void> future = setWaitForClusterState(state -> { + final SnapshotDeletionsInProgress snapshotDeletionsInProgress = SnapshotDeletionsInProgress.get(state); + return snapshotDeletionsInProgress.getEntries() + .stream() + .flatMap(entry -> entry.getSnapshots().stream()) + .anyMatch(snapshotId -> snapshotId.getName().equals("snap-1")); + + }); + final CyclicBarrier barrier = testRepository.blockOnceForListBlobs(); + new Thread(() -> { + logger.info("--> start deleting snapshot snap-1"); + startDeleteSnapshot(repoName, "snap-1"); + }).start(); + assertBusy(() -> assertThat(barrier.getNumberWaiting(), equalTo(1))); + future.actionGet(); + logger.info("--> repository blocked at listing root blobs"); + + // 3 - Stop the data node so that index-2 and index-3 become unassigned + internalCluster().stopNode(dataNodeName); + internalCluster().validateClusterFormed(); + + // 4 - Create a new snapshot for the unassigned indices; its shards should be a mix of QUEUED and MISSING + future = setWaitForClusterState(state -> { + final SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(state); + return snapshotsInProgress.asStream() + .anyMatch( + entry -> entry.snapshot().getSnapshotId().getName().equals("snap-2") + && entry.state() == SnapshotsInProgress.State.STARTED + && entry.shards() + .values() + .stream() + .map(SnapshotsInProgress.ShardSnapshotStatus::state) + .collect(Collectors.toSet()) + .equals(Set.of(SnapshotsInProgress.ShardState.QUEUED, SnapshotsInProgress.ShardState.MISSING)) + ); + }); + clusterAdmin().prepareCreateSnapshot(repoName, "snap-2") + .setIndices("index-2", "index-3") + .setPartial(true) + .setWaitForCompletion(false) + .get(); + // Delete index-3 so that it becomes MISSING for the snapshot + indicesAdmin().prepareDelete("index-3").get(); + future.actionGet(); + + // 5 - Start deleting snap-2; the delete itself should be WAITING, but it moves the in-progress snap-2 to SUCCESS + future = setWaitForClusterState(state -> { + final SnapshotsInProgress snapshotsInProgress = SnapshotsInProgress.get(state); + final boolean foundSnapshot = snapshotsInProgress.asStream() + .anyMatch( + entry -> entry.snapshot().getSnapshotId().getName().equals("snap-2") + && entry.state() == SnapshotsInProgress.State.SUCCESS + && entry.shards() + .values() + .stream() + .map(SnapshotsInProgress.ShardSnapshotStatus::state) + .collect(Collectors.toSet()) + .equals(Set.of(SnapshotsInProgress.ShardState.FAILED, SnapshotsInProgress.ShardState.MISSING)) + ); + if (false == foundSnapshot) { + return false; + } + final SnapshotDeletionsInProgress snapshotDeletionsInProgress = SnapshotDeletionsInProgress.get(state); + return snapshotDeletionsInProgress.getEntries() + .stream() + .anyMatch( + entry -> entry.state() == SnapshotDeletionsInProgress.State.WAITING + && entry.getSnapshots().stream().anyMatch(snapshotId -> snapshotId.getName().equals("snap-2")) + ); + }); + new Thread(() -> { + logger.info("--> start deleting snapshot snap-2"); + startDeleteSnapshot(repoName, "snap-2"); + }).start(); + future.actionGet();
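+ + // (Hedged editorial note: at this point snap-2's in-progress entry has already been moved to SUCCESS, that is, it has + // apparently been finalized once, while its delete waits in WAITING behind the snap-1 delete that is still blocked + // listing root blobs. Unblocking below must therefore complete the snap-1 delete without finalizing snap-2 again.)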
+ // 6 - Let the deletion of snap-1 complete. It should *not* lead to double finalization + barrier.await(); + + awaitNoMoreRunningOperations(); + } + + private PlainActionFuture<Void> setWaitForClusterState(Predicate<ClusterState> predicate) { + final var clusterStateObserver = new ClusterStateObserver( + internalCluster().getCurrentMasterNodeInstance(ClusterService.class), + logger, + new ThreadContext(Settings.EMPTY) + ); + final PlainActionFuture<Void> future = new PlainActionFuture<>(); + clusterStateObserver.waitForNextChange(new ClusterStateObserver.Listener() { + @Override + public void onNewClusterState(ClusterState state) { + future.onResponse(null); + } + + @Override + public void onClusterServiceClose() { + future.onFailure(new IllegalStateException("cluster service closed")); + } + + @Override + public void onTimeout(TimeValue timeout) { + future.onFailure(new IllegalStateException("timeout")); + } + }, predicate, TimeValue.timeValueSeconds(30)); + return future; + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(TestRepositoryPlugin.class); + } + + public static class TestRepositoryPlugin extends Plugin implements RepositoryPlugin { + + public static final String REPO_TYPE = "test"; + + @Override + public Map<String, Repository.Factory> getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + REPO_TYPE, + metadata -> new TestRepository(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + } + + public static class TestRepository extends FsRepository { + + private static final Logger logger = LogManager.getLogger(TestRepository.class); + private final AtomicReference<CyclicBarrier> barrierRef = new AtomicReference<>(); + + public TestRepository( + RepositoryMetadata metadata, + Environment environment, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, environment, namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + public CyclicBarrier blockOnceForListBlobs() { + final CyclicBarrier barrier = new CyclicBarrier(2); + if (barrierRef.compareAndSet(null, barrier)) { + return barrier; + } else { + throw new AssertionError("must unblock first"); + } + } + + @Override + protected BlobStore createBlobStore() throws Exception { + final var blobStore = super.createBlobStore(); + return new BlobStoreWrapper(blobStore) { + + @Override + public BlobContainer blobContainer(BlobPath path) { + final var blobContainer = super.blobContainer(path); + + return new FilterBlobContainer(blobContainer) { + + @Override + protected BlobContainer wrapChild(BlobContainer child) { + return child; + } + + @Override + public Map<String, BlobMetadata> listBlobs(OperationPurpose purpose) throws IOException { + final CyclicBarrier barrier = barrierRef.get(); + if (barrier != null) { + try { + logger.info("--> Start blocking listBlobs"); + barrier.await(); + if (false == barrierRef.compareAndSet(barrier, null)) { + throw new AssertionError("barrier changed while blocking"); + } + logger.info("--> Done blocking listBlobs"); + } catch (InterruptedException | BrokenBarrierException e) { + throw new AssertionError(e); + } + } + return super.listBlobs(purpose); + } + }; + } + }; + } + } +} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java index
d319eab2b192b..d6dad537afaea 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/threadpool/SimpleThreadPoolIT.java @@ -63,8 +63,8 @@ public void testThreadNames() throws Exception { indexRandom(true, builders); int numSearches = randomIntBetween(2, 100); for (int i = 0; i < numSearches; i++) { - assertNoFailures(client().prepareSearch("idx").setQuery(QueryBuilders.termQuery("str_value", "s" + i))); - assertNoFailures(client().prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); + assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("str_value", "s" + i))); + assertNoFailures(prepareSearch("idx").setQuery(QueryBuilders.termQuery("l_value", i))); } Set threadNames = new HashSet<>(); for (long l : threadBean.getAllThreadIds()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java index b9cc8ef8ac515..a68d56e05cb48 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/ConcurrentSeqNoVersioningIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.coordination.LinearizabilityChecker; +import org.elasticsearch.cluster.coordination.LinearizabilityChecker.LinearizabilityCheckAborted; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -26,8 +27,6 @@ import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.disruption.ServiceDisruptionScheme; -import org.elasticsearch.threadpool.Scheduler; -import org.elasticsearch.threadpool.ThreadPool; import java.io.FileInputStream; import java.io.IOException; @@ -40,10 +39,8 @@ import java.util.Random; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CyclicBarrier; -import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Function; @@ -434,18 +431,9 @@ public void assertLinearizable() { LinearizabilityChecker.SequentialSpec spec = new CASSequentialSpec(initialVersion); boolean linearizable = false; try { - final ScheduledThreadPoolExecutor scheduler = Scheduler.initScheduler(Settings.EMPTY, "test-scheduler"); - final AtomicBoolean abort = new AtomicBoolean(); - // Large histories can be problematic and have the linearizability checker run OOM - // Bound the time how long the checker can run on such histories (Values empirically determined) - if (history.size() > 300) { - scheduler.schedule(() -> abort.set(true), 10, TimeUnit.SECONDS); - } - linearizable = LinearizabilityChecker.isLinearizable(spec, history, missingResponseGenerator(), abort::get); - ThreadPool.terminate(scheduler, 1, TimeUnit.SECONDS); - if (abort.get() && linearizable == false) { - linearizable = true; // let the test pass - } + linearizable = 
LinearizabilityChecker.isLinearizable(spec, history, missingResponseGenerator()); + } catch (LinearizabilityCheckAborted e) { + logger.warn("linearizability check was aborted", e); + } finally { // implicitly test that we can serialize all histories. String serializedHistory = base64Serialize(history); @@ -683,7 +671,8 @@ public static void main(String[] args) throws Exception { } @SuppressForbidden(reason = "system out is ok for a command line tool") - private static void runLinearizabilityChecker(FileInputStream fileInputStream, long primaryTerm, long seqNo) throws IOException { + private static void runLinearizabilityChecker(FileInputStream fileInputStream, long primaryTerm, long seqNo) throws IOException, + LinearizabilityCheckAborted { StreamInput is = new InputStreamStreamInput(Base64.getDecoder().wrap(fileInputStream)); is = new NamedWriteableAwareStreamInput(is, createNamedWriteableRegistry()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java index c6fbdc909e2e6..5a1c09098f21f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/versioning/SimpleVersioningIT.java @@ -327,13 +327,13 @@ public void testCompareAndSet() { // search with versioning for (int i = 0; i < 10; i++) { // TODO: ADD SEQ NO! - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).setVersion(true).execute().actionGet(); assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(2L)); } // search without versioning for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()).execute().actionGet(); assertThat(searchResponse.getHits().getAt(0).getVersion(), equalTo(Versions.NOT_FOUND)); } @@ -396,8 +396,7 @@ public void testSimpleVersioningWithFlush() throws Exception { client().admin().indices().prepareRefresh().execute().actionGet(); for (int i = 0; i < 10; i++) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse searchResponse = prepareSearch().setQuery(matchAllQuery()) .setVersion(true) .seqNoAndPrimaryTerm(true) .execute() diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 1a082e7558577..3399be18077a3 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -224,6 +224,7 @@ exports org.elasticsearch.common.xcontent.support; exports org.elasticsearch.discovery; exports org.elasticsearch.env; + exports org.elasticsearch.features; exports org.elasticsearch.gateway; exports org.elasticsearch.health; exports org.elasticsearch.health.node; @@ -400,6 +401,10 @@ uses org.elasticsearch.node.internal.TerminationHandlerProvider; uses org.elasticsearch.internal.VersionExtension; uses org.elasticsearch.internal.BuildExtension; + uses org.elasticsearch.features.FeatureSpecification; + + provides org.elasticsearch.features.FeatureSpecification with org.elasticsearch.features.FeaturesSupportedSpecification; + uses org.elasticsearch.plugins.internal.SettingsExtension; uses RestExtension; uses
org.elasticsearch.action.admin.cluster.node.info.ComponentVersionNumber; diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index 1320346ce2ee3..24cd82d29614e 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Booleans; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.internal.BuildExtension; import org.elasticsearch.plugins.ExtensionLoader; @@ -127,14 +128,14 @@ private static Build findLocalBuild() { final String flavor = "default"; String minWireCompat = Version.CURRENT.minimumCompatibilityVersion().toString(); - String minIndexCompat = minimumCompatString(IndexVersion.MINIMUM_COMPATIBLE); + String minIndexCompat = minimumCompatString(IndexVersions.MINIMUM_COMPATIBLE); String displayString = defaultDisplayString(type, hash, date, qualifiedVersionString(version, qualifier, isSnapshot)); return new Build(flavor, type, hash, date, version, qualifier, isSnapshot, minWireCompat, minIndexCompat, displayString); } public static String minimumCompatString(IndexVersion minimumCompatible) { - if (minimumCompatible.before(IndexVersion.FIRST_DETACHED_INDEX_VERSION)) { + if (minimumCompatible.before(IndexVersions.FIRST_DETACHED_INDEX_VERSION)) { // use Version for compatibility return Version.fromId(minimumCompatible.id()).toString(); } else { diff --git a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java index 7e22e1797b527..6c0836c277444 100644 --- a/server/src/main/java/org/elasticsearch/ExceptionsHelper.java +++ b/server/src/main/java/org/elasticsearch/ExceptionsHelper.java @@ -278,6 +278,25 @@ public static ShardOperationFailedException[] groupBy(ShardOperationFailedExcept return uniqueFailures.toArray(new ShardOperationFailedException[0]); } + /** + * Utility method useful for determining whether to log an Exception or perhaps + * avoid logging a stacktrace if the caller/logger is not interested in these + * types of node/shard issues. + * + * @param t Throwable to inspect + * @return true if the Throwable is an instance of an Exception that indicates + * that either a Node or shard is unavailable/disconnected.
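+     * <p>
+     * A minimal usage sketch; {@code logger} and the caught exception {@code e} here are
+     * hypothetical, not part of this change:
+     * <pre>{@code
+     * if (ExceptionsHelper.isNodeOrShardUnavailableTypeException(e)) {
+     *     logger.debug("node/shard unavailable: {}", e.getMessage()); // routine churn, no stack trace
+     * } else {
+     *     logger.warn("unexpected failure", e);
+     * }
+     * }</pre>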
+ */ + public static boolean isNodeOrShardUnavailableTypeException(Throwable t) { + return (t instanceof org.elasticsearch.action.NoShardAvailableActionException + || t instanceof org.elasticsearch.action.UnavailableShardsException + || t instanceof org.elasticsearch.node.NodeClosedException + || t instanceof org.elasticsearch.transport.NodeDisconnectedException + || t instanceof org.elasticsearch.discovery.MasterNotDiscoveredException + || t instanceof org.elasticsearch.transport.NodeNotConnectedException + || t instanceof org.elasticsearch.cluster.block.ClusterBlockException); + } + private static class GroupBy { final String reason; final String index; diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index a3f36c6a4b6fb..f265ebb240dcf 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -144,6 +144,11 @@ static TransportVersion def(int id) { public static final TransportVersion PIPELINES_IN_BULK_RESPONSE_ADDED = def(8_519_00_0); public static final TransportVersion PLUGIN_DESCRIPTOR_STRING_VERSION = def(8_520_00_0); public static final TransportVersion TOO_MANY_SCROLL_CONTEXTS_EXCEPTION_ADDED = def(8_521_00_0); + public static final TransportVersion UNCONTENDED_REGISTER_ANALYSIS_ADDED = def(8_522_00_0); + public static final TransportVersion TRANSFORM_GET_CHECKPOINT_TIMEOUT_ADDED = def(8_523_00_0); + public static final TransportVersion IP_ADDRESS_WRITEABLE = def(8_524_00_0); + public static final TransportVersion PRIMARY_TERM_ADDED = def(8_525_00_0); + public static final TransportVersion CLUSTER_FEATURES_ADDED = def(8_526_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index 652f5102769ae..af8637cf1febc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.persistent.PersistentTasksService; @@ -54,7 +55,7 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA * Once all feature migrations for 8.x -> 9.x have been tested, we can bump this to Version.V_8_0_0 */ public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersion.V_7_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index f79b0ac9c02b4..c0c9ec493de70 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -8,6 +8,8 @@ package org.elasticsearch.action.admin.cluster.node.tasks.list; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; @@ -40,6 +42,9 @@ import static org.elasticsearch.core.TimeValue.timeValueSeconds; public class TransportListTasksAction extends TransportTasksAction { + + private static final Logger logger = LogManager.getLogger(TransportListTasksAction.class); + public static final ActionType TYPE = new ActionType<>("cluster:monitor/tasks/lists", ListTasksResponse::new); public static long waitForCompletionTimeout(TimeValue timeout) { @@ -127,6 +132,7 @@ protected void processTasks(CancellableTask nodeTask, ListTasksRequest request, } processedTasks.add(task); } + logger.trace("Matched {} tasks of all running {}", processedTasks, taskManager.getTasks().values()); } catch (Exception e) { allMatchedTasksRemovedListener.onFailure(e); return; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java index a329d07fe5f63..b1394d261e790 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesAction.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.nodes.BaseNodeResponse; import org.elasticsearch.client.internal.Client; @@ -100,6 +101,14 @@ public TransportAction(TransportService transportService, ActionFilters actionFi @Override protected void doExecute(Task task, Request request, ActionListener listener) { final ThreadContext threadContext = client.threadPool().getThreadContext(); + executeWithSystemContext( + request, + threadContext, + ContextPreservingActionListener.wrapPreservingContext(listener, threadContext) + ); + } + + private void executeWithSystemContext(Request request, ThreadContext threadContext, ActionListener listener) { try (var ignore = threadContext.stashContext()) { threadContext.markAsSystemContext(); if (request.remoteClusterServer) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java index 374b9b19f16ca..2c8371b0af4f2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/cleanup/TransportCleanupRepositoryAction.java @@ -154,7 +154,10 @@ private void cleanupRepo(String repositoryName, ActionListener repositoryDataListener = new ListenableFuture<>(); - 
repository.getRepositoryData(repositoryDataListener); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // Listener is lightweight, only submits a cluster state update task, no need to fork + repositoryDataListener + ); repositoryDataListener.addListener(listener.delegateFailureAndWrap((delegate, repositoryData) -> { final long repositoryStateId = repositoryData.getGenId(); logger.info("Running cleanup operations on repository [{}][{}]", repositoryName, repositoryStateId); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index 5af2c12d39bc1..f4e301e0748bb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -38,6 +38,7 @@ import java.io.IOException; import java.util.Map; +import java.util.Set; import java.util.function.Predicate; public class TransportClusterStateAction extends TransportMasterNodeReadAction { @@ -143,6 +144,11 @@ private static Map getCompatibilityVersions(Clust return clusterState.compatibilityVersions(); } + @SuppressForbidden(reason = "exposing ClusterState#clusterFeatures requires reading them") + private static Map> getClusterFeatures(ClusterState clusterState) { + return clusterState.clusterFeatures().nodeFeatures(); + } + private ClusterStateResponse buildResponse(final ClusterStateRequest request, final ClusterState currentState) { logger.trace("Serving cluster state request using version {}", currentState.version()); ClusterState.Builder builder = ClusterState.builder(currentState.getClusterName()); @@ -152,6 +158,7 @@ private ClusterStateResponse buildResponse(final ClusterStateRequest request, fi if (request.nodes()) { builder.nodes(currentState.nodes()); builder.nodeIdsToCompatibilityVersions(getCompatibilityVersions(currentState)); + builder.nodeFeatures(getClusterFeatures(currentState)); } if (request.routingTable()) { if (request.indices().length > 0) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java index f5ebbcd4cc3c9..f4c72d7e37f3d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportShardRefreshAction.java @@ -126,6 +126,7 @@ public void onPrimaryOperationComplete( } else { UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( indexShardRoutingTable, + replicaRequest.primaryRefreshResult.primaryTerm(), replicaRequest.primaryRefreshResult.generation(), false ); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java index aab4e1f52b1fc..64091af1266f8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/TransportUnpromotableShardRefreshAction.java @@ -60,7 +60,11 @@ protected void unpromotableShardOperation( ) { 
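// Note on the change just below (rationale assumed, not stated in the diff): the wait is now
// keyed on the primary term as well as the segment generation, because generation numbers are
// only meaningful within a single primary term; waiting on the generation alone could be
// satisfied by a segment from a stale primary.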
ActionListener.run(responseListener, listener -> { IndexShard shard = indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); - shard.waitForSegmentGeneration(request.getSegmentGeneration(), listener.map(l -> ActionResponse.Empty.INSTANCE)); + shard.waitForPrimaryTermAndGeneration( + request.getPrimaryTerm(), + request.getSegmentGeneration(), + listener.map(l -> ActionResponse.Empty.INSTANCE) + ); }); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java index 3e514b230bd54..2e4492725dd8f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/UnpromotableShardRefreshRequest.java @@ -8,9 +8,11 @@ package org.elasticsearch.action.admin.indices.refresh; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.broadcast.unpromotable.BroadcastUnpromotableRequest; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.engine.Engine; @@ -21,20 +23,26 @@ public class UnpromotableShardRefreshRequest extends BroadcastUnpromotableRequest { + private final long primaryTerm; private final long segmentGeneration; public UnpromotableShardRefreshRequest( IndexShardRoutingTable indexShardRoutingTable, + long primaryTerm, long segmentGeneration, boolean failShardOnError ) { super(indexShardRoutingTable, failShardOnError); + this.primaryTerm = primaryTerm; this.segmentGeneration = segmentGeneration; } public UnpromotableShardRefreshRequest(StreamInput in) throws IOException { super(in); segmentGeneration = in.readVLong(); + primaryTerm = in.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED) + ? 
in.readVLong() + : Engine.UNKNOWN_PRIMARY_TERM; } @Override @@ -50,14 +58,26 @@ public ActionRequestValidationException validate() { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeVLong(segmentGeneration); + if (out.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED)) { + out.writeVLong(primaryTerm); + } } public long getSegmentGeneration() { return segmentGeneration; } + public long getPrimaryTerm() { + return primaryTerm; + } + @Override public String toString() { - return "UnpromotableShardRefreshRequest{" + "shardId=" + shardId() + ", segmentGeneration=" + segmentGeneration + '}'; + return Strings.format( + "UnpromotableShardRefreshRequest{shardId=%s, primaryTerm=%d, segmentGeneration=%d}", + shardId(), + primaryTerm, + segmentGeneration + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index c2177b757a7a3..1ba249aff8538 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -34,7 +34,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemDataStreamDescriptor; import org.elasticsearch.indices.SystemIndices; import org.elasticsearch.snapshots.SnapshotInProgressException; @@ -327,7 +327,7 @@ private static void downgradeBrokenTsdbBackingIndices(DataStream dataStream, Met for (Index indexName : dataStream.getIndices()) { var index = builder.getSafe(indexName); final Settings originalSettings = index.getSettings(); - if (index.getCreationVersion().before(IndexVersion.FIRST_DETACHED_INDEX_VERSION) + if (index.getCreationVersion().before(IndexVersions.FIRST_DETACHED_INDEX_VERSION) && index.getIndexMode() == IndexMode.TIME_SERIES && originalSettings.keySet().contains(IndexSettings.TIME_SERIES_START_TIME.getKey()) == false && originalSettings.keySet().contains(IndexSettings.TIME_SERIES_END_TIME.getKey()) == false) { diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index c9a44c14106ee..d9837f94b0996 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -325,9 +325,31 @@ private static FieldCapabilitiesResponse merge( } else { collectResponseMap(responseMapBuilder, responseMap); } + + // The merge method is only called on the primary coordinator for cross-cluster field caps, so we + // log relevant "5xx" errors that occurred in this 2xx response to ensure they are only logged once. + // These failures have already been deduplicated before this method was called.
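+        // Illustrative outcomes of the shouldLogException filter below (status mappings assumed,
+        // not part of this change): an unexpected NullPointerException maps to a 500 and is
+        // logged with its stack trace, while a ConnectTransportException or a
+        // NoShardAvailableActionException is suppressed.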
+ for (FieldCapabilitiesFailure failure : failures) { + if (shouldLogException(failure.getException())) { + LOGGER.warn( + "Field caps partial-results Exception for indices " + Arrays.toString(failure.getIndices()), + failure.getException() + ); + } + } return new FieldCapabilitiesResponse(indices, Collections.unmodifiableMap(responseMap), failures); } + private static boolean shouldLogException(Exception e) { + // ConnectTransportExceptions are thrown when a cluster marked with skip_unavailable=true is not available for searching + // (Clusters marked with skip_unavailable=false return a different error that is considered a 4xx error.) + // In such a case, the field-caps endpoint returns a 200 (unless all clusters failed). + // To keep the logs from being too noisy, we choose not to log the ConnectTransportException here. + return e instanceof org.elasticsearch.transport.ConnectTransportException == false + && ExceptionsHelper.status(e).getStatus() >= 500 + && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false; + } + private static void collectResponseMapIncludingUnmapped( String[] indices, Map> responseMapBuilder, diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java index 4b70373979919..9f2fe8ae5aa8c 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -213,7 +214,9 @@ private void handleGetOnUnpromotableShard(GetRequest request, IndexShard indexSh ActionRunnable.supply(l, () -> shardOperation(request, shardId)).run(); } else { assert r.segmentGeneration() > -1L; - indexShard.waitForSegmentGeneration( + assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; + indexShard.waitForPrimaryTermAndGeneration( + r.primaryTerm(), r.segmentGeneration(), listener.delegateFailureAndWrap((ll, aLong) -> super.asyncShardOperation(request, shardId, ll)) ); diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java index 553da3d845b54..1b180874b433d 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportGetFromTranslogAction.java @@ -10,6 +10,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -18,6 +19,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -81,7 +83,7 @@ protected void doExecute(Task task, Request
request, ActionListener li } segmentGeneration = ((InternalEngine) engine).getLastUnsafeSegmentGenerationForGets(); } - return new Response(result, segmentGeneration); + return new Response(result, indexShard.getOperationPrimaryTerm(), segmentGeneration); }); } @@ -140,23 +142,31 @@ public IndicesOptions indicesOptions() { public static class Response extends ActionResponse { @Nullable private final GetResult getResult; + private final long primaryTerm; private final long segmentGeneration; - public Response(GetResult getResult, long segmentGeneration) { + public Response(GetResult getResult, long primaryTerm, long segmentGeneration) { this.getResult = getResult; this.segmentGeneration = segmentGeneration; + this.primaryTerm = primaryTerm; } public Response(StreamInput in) throws IOException { super(in); segmentGeneration = in.readZLong(); getResult = in.readOptionalWriteable(GetResult::new); + primaryTerm = in.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED) + ? in.readVLong() + : Engine.UNKNOWN_PRIMARY_TERM; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeZLong(segmentGeneration); out.writeOptionalWriteable(getResult); + if (out.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED)) { + out.writeVLong(primaryTerm); + } } @Nullable @@ -173,22 +183,33 @@ public long segmentGeneration() { return segmentGeneration; } + public long primaryTerm() { + return primaryTerm; + } + @Override public boolean equals(Object o) { if (this == o) return true; if (o instanceof Response == false) return false; - Response other = (Response) o; - return segmentGeneration == other.segmentGeneration && Objects.equals(getResult, other.getResult); + Response response = (Response) o; + return segmentGeneration == response.segmentGeneration + && Objects.equals(getResult, response.getResult) + && primaryTerm == response.primaryTerm; } @Override public int hashCode() { - return Objects.hash(segmentGeneration, getResult); + return Objects.hash(segmentGeneration, getResult, primaryTerm); } @Override public String toString() { - return "Response{" + "getResult=" + getResult + ", segmentGeneration=" + segmentGeneration + "}"; + return Strings.format( + "Response{getResult=%s, primaryTerm=%d, segmentGeneration=%d}", + getResult, + primaryTerm, + segmentGeneration + ); } } } diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 83d5e6a89d138..9986c4019c81b 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; @@ -203,7 +204,9 @@ private void handleMultiGetOnUnpromotableShard( ActionRunnable.supply(l, () -> handleLocalGets(request, r.multiGetShardResponse(), shardId)).run(); } else { assert r.segmentGeneration() > -1L; - indexShard.waitForSegmentGeneration( + assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; + indexShard.waitForPrimaryTermAndGeneration( + r.primaryTerm(), r.segmentGeneration(), listener.delegateFailureAndWrap( (ll, aLong) -> 
getExecutor(request, shardId).execute( diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java index 032f002a675c4..5058990efd966 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetFomTranslogAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.get; import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -16,6 +17,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.TransportActions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.IndexService; @@ -102,7 +104,7 @@ protected void doExecute(Task task, Request request, ActionListener li } segmentGeneration = ((InternalEngine) engine).getLastUnsafeSegmentGenerationForGets(); } - return new Response(multiGetShardResponse, segmentGeneration); + return new Response(multiGetShardResponse, indexShard.getOperationPrimaryTerm(), segmentGeneration); }); } @@ -164,9 +166,11 @@ public int hashCode() { public static class Response extends ActionResponse { private final MultiGetShardResponse multiGetShardResponse; + private final long primaryTerm; private final long segmentGeneration; - public Response(MultiGetShardResponse response, long segmentGeneration) { + public Response(MultiGetShardResponse response, long primaryTerm, long segmentGeneration) { + this.primaryTerm = primaryTerm; this.segmentGeneration = segmentGeneration; this.multiGetShardResponse = response; } @@ -175,43 +179,55 @@ public Response(StreamInput in) throws IOException { super(in); segmentGeneration = in.readZLong(); multiGetShardResponse = new MultiGetShardResponse(in); + primaryTerm = in.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED) + ? 
in.readVLong() + : Engine.UNKNOWN_PRIMARY_TERM; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeZLong(segmentGeneration); multiGetShardResponse.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.PRIMARY_TERM_ADDED)) { + out.writeVLong(primaryTerm); + } } public long segmentGeneration() { return segmentGeneration; } + public long primaryTerm() { + return primaryTerm; + } + public MultiGetShardResponse multiGetShardResponse() { return multiGetShardResponse; } @Override public String toString() { - return "ShardMultiGetFomTranslogResponse{" - + "multiGetShardResponse=" - + multiGetShardResponse - + ", segmentGeneration=" - + segmentGeneration - + "}"; + return Strings.format( + "ShardMultiGetFomTranslogResponse{multiGetShardResponse=%s, primaryTerm=%d, segmentGeneration=%d}", + multiGetShardResponse, + primaryTerm, + segmentGeneration + ); } @Override public boolean equals(Object o) { if (this == o) return true; if (o instanceof Response == false) return false; - Response other = (Response) o; - return segmentGeneration == other.segmentGeneration && Objects.equals(multiGetShardResponse, other.multiGetShardResponse); + Response response = (Response) o; + return segmentGeneration == response.segmentGeneration + && Objects.equals(multiGetShardResponse, response.multiGetShardResponse) + && primaryTerm == response.primaryTerm; } @Override public int hashCode() { - return Objects.hash(segmentGeneration, multiGetShardResponse); + return Objects.hash(segmentGeneration, multiGetShardResponse, primaryTerm); } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java index 6fd7ef88a8e86..9f6d4ed27cf6c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/PostWriteRefresh.java @@ -146,6 +146,7 @@ private void sendUnpromotableRequests( ) { UnpromotableShardRefreshRequest unpromotableReplicaRequest = new UnpromotableShardRefreshRequest( indexShard.getReplicationGroup().getRoutingTable(), + indexShard.getOperationPrimaryTerm(), generation, true ); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/ServerArgs.java b/server/src/main/java/org/elasticsearch/bootstrap/ServerArgs.java index bc7f1928d170c..f0b1f654150a9 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/ServerArgs.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/ServerArgs.java @@ -29,10 +29,17 @@ * @param secrets the provided secure settings implementation * @param nodeSettings the node settings read from {@code elasticsearch.yml}, the cli and the process environment * @param configDir the directory where {@code elasticsearch.yml} and other config exists + * @param logsDir the directory where log files should be written */ -public record ServerArgs(boolean daemonize, boolean quiet, Path pidFile, SecureSettings secrets, Settings nodeSettings, Path configDir) - implements - Writeable { +public record ServerArgs( + boolean daemonize, + boolean quiet, + Path pidFile, + SecureSettings secrets, + Settings nodeSettings, + Path configDir, + Path logsDir +) implements Writeable { /** * Arguments for running Elasticsearch. 
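// Wire-order note (sketch; field names taken from the record above): the stream constructor in
// the next hunk must read fields in exactly the order writeTo emits them. With two adjacent
// Path strings the pairing is purely positional:
//
//     out.writeString(configDir.toString());   // written first -> read back first
//     out.writeString(logsDir.toString());     // written second -> read back second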
@@ -59,6 +66,7 @@ public ServerArgs(StreamInput in) throws IOException { readPidFile(in), readSecureSettingsFromStream(in), Settings.readSettingsFromStream(in), + resolvePath(in.readString()), resolvePath(in.readString()) ); } @@ -82,6 +90,7 @@ public void writeTo(StreamOutput out) throws IOException { secrets.writeTo(out); nodeSettings.writeTo(out); out.writeString(configDir.toString()); + out.writeString(logsDir.toString()); } private static SecureSettings readSecureSettingsFromStream(StreamInput in) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/client/internal/Client.java b/server/src/main/java/org/elasticsearch/client/internal/Client.java index 1065efb857fe7..89cb764549767 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/Client.java +++ b/server/src/main/java/org/elasticsearch/client/internal/Client.java @@ -73,6 +73,7 @@ */ public interface Client extends ElasticsearchClient, Releasable { + // Note: This setting is registered only for bwc. The value is never read. Setting CLIENT_TYPE_SETTING_S = new Setting<>("client.type", "node", (s) -> { return switch (s) { case "node", "transport" -> s; diff --git a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java index f82a5a60600c7..967e5c72efdd0 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ParentTaskAssigningClient.java @@ -38,6 +38,10 @@ public ParentTaskAssigningClient(Client in, DiscoveryNode localNode, Task parent this(in, new TaskId(localNode.getId(), parentTask.getId())); } + public TaskId getParentTask() { + return parentTask; + } + /** * Fetch the wrapped client. Use this to make calls that don't set {@link ActionRequest#setParentTask(TaskId)}. */ diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java new file mode 100644 index 0000000000000..ae68bfafdd6c5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterFeatures.java @@ -0,0 +1,266 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.cluster; + +import org.elasticsearch.common.collect.Iterators; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ChunkedToXContentObject; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.xcontent.ToXContent; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; +import java.util.stream.Collectors; + +/** + * Stores information on what features are present throughout the cluster + */ +public class ClusterFeatures implements Diffable, ChunkedToXContentObject { + + /** + * The features on each individual node + */ + private final Map> nodeFeatures; + /** + * The features present on all nodes + */ + private Set allNodeFeatures; + + public ClusterFeatures(Map> nodeFeatures) { + this.nodeFeatures = nodeFeatures.entrySet() + .stream() + .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> Set.copyOf(e.getValue()))); + } + + private Set calculateAllNodeFeatures() { + if (nodeFeatures.isEmpty()) { + return Set.of(); + } + + Set allNodeFeatures = null; + for (Set featureSet : nodeFeatures.values()) { + if (allNodeFeatures == null) { + allNodeFeatures = new HashSet<>(featureSet); + } else { + allNodeFeatures.retainAll(featureSet); + } + } + return Set.copyOf(allNodeFeatures); + } + + /** + * Returns the features reported by each node in the cluster. + *

    + * NOTE: This should not be used directly. + * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. + */ + public Map> nodeFeatures() { + return nodeFeatures; + } + + /** + * {@code true} if {@code feature} is present on all nodes in the cluster. + *
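+     * For example, in a hypothetical two-node cluster where node n1 reports features
+     * {f1, f2} and node n2 reports {f1}, only f1 is considered present on all nodes.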

    + * NOTE: This should not be used directly, as it does not read historical features. + * Please use {@link org.elasticsearch.features.FeatureService#clusterHasFeature} instead. + */ + public boolean clusterHasFeature(NodeFeature feature) { + if (allNodeFeatures == null) { + allNodeFeatures = calculateAllNodeFeatures(); + } + return allNodeFeatures.contains(feature.id()); + } + + /** + * Writes a canonical set of feature sets to {@code StreamOutput}. + * This aims to minimise the data serialized by assuming that most feature sets are going to be identical + * in any one cluster state. + */ + private static void writeCanonicalSets(StreamOutput out, Map> featureSets) throws IOException { + List> canonicalFeatureSets = new ArrayList<>(); + Map nodeFeatureSetIndexes = new HashMap<>(); + + IdentityHashMap, Integer> identityLookup = new IdentityHashMap<>(); + Map, Integer> lookup = new HashMap<>(); + for (var fse : featureSets.entrySet()) { + // do a fast identity lookup first + Integer idx = identityLookup.get(fse.getValue()); + if (idx != null) { + nodeFeatureSetIndexes.put(fse.getKey(), idx); + continue; + } + + // do a contents equality lookup next + idx = lookup.get(fse.getValue()); + if (idx != null) { + nodeFeatureSetIndexes.put(fse.getKey(), idx); + continue; + } + + // we've found a new feature set - insert appropriately + idx = canonicalFeatureSets.size(); + canonicalFeatureSets.add(fse.getValue()); + nodeFeatureSetIndexes.put(fse.getKey(), idx); + identityLookup.put(fse.getValue(), idx); + lookup.put(fse.getValue(), idx); + } + + out.writeCollection(canonicalFeatureSets, (o, c) -> o.writeCollection(c, StreamOutput::writeString)); + out.writeMap(nodeFeatureSetIndexes, StreamOutput::writeVInt); + } + + private static Map> readCanonicalSets(StreamInput in) throws IOException { + List> featureSets = in.readCollectionAsList(i -> i.readCollectionAsImmutableSet(StreamInput::readString)); + Map nodeIndexes = in.readMap(StreamInput::readVInt); + + return nodeIndexes.entrySet().stream().collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, e -> featureSets.get(e.getValue()))); + } + + public static ClusterFeatures readFrom(StreamInput in) throws IOException { + return new ClusterFeatures(readCanonicalSets(in)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + writeCanonicalSets(out, nodeFeatures); + } + + @Override + public Diff diff(ClusterFeatures previousState) { + Set deletes = new HashSet<>(); + Map> removals = new HashMap<>(); + Map> additions = new HashMap<>(); + + for (var prevNodeFeatures : previousState.nodeFeatures.entrySet()) { + Set newFeatures = nodeFeatures.get(prevNodeFeatures.getKey()); + if (newFeatures == null) { + deletes.add(prevNodeFeatures.getKey()); + } else { + Set removed = new HashSet<>(prevNodeFeatures.getValue()); + removed.removeAll(newFeatures); + if (removed.isEmpty() == false) { + removals.put(prevNodeFeatures.getKey(), removed); + } + + Set added = new HashSet<>(newFeatures); + added.removeAll(prevNodeFeatures.getValue()); + if (added.isEmpty() == false) { + additions.put(prevNodeFeatures.getKey(), added); + } + } + } + + // find any completely new nodes + for (var newNodeFeatures : nodeFeatures.entrySet()) { + if (previousState.nodeFeatures.containsKey(newNodeFeatures.getKey()) == false) { + additions.put(newNodeFeatures.getKey(), newNodeFeatures.getValue()); + } + } + + return new ClusterFeaturesDiff(deletes, removals, additions); + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return new 
ClusterFeaturesDiff(in); + } + + private static class ClusterFeaturesDiff implements Diff { + + private final Set deletes; + private final Map> removals; + private final Map> additions; + + private ClusterFeaturesDiff(Set deletes, Map> removals, Map> additions) { + this.deletes = deletes; + this.removals = removals; + this.additions = additions; + } + + private ClusterFeaturesDiff(StreamInput in) throws IOException { + deletes = in.readCollectionAsImmutableSet(StreamInput::readString); + removals = readCanonicalSets(in); + additions = readCanonicalSets(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(deletes, StreamOutput::writeString); + writeCanonicalSets(out, removals); + writeCanonicalSets(out, additions); + } + + @Override + public ClusterFeatures apply(ClusterFeatures part) { + if (deletes.isEmpty() && removals.isEmpty() && additions.isEmpty()) { + return part; // nothing changing + } + + Map> newFeatures = new HashMap<>(part.nodeFeatures); + deletes.forEach(newFeatures::remove); + + // make sure each value is mutable when we modify it + for (var removes : removals.entrySet()) { + newFeatures.compute(removes.getKey(), (k, v) -> v instanceof HashSet ? v : new HashSet<>(v)).removeAll(removes.getValue()); + } + for (var adds : additions.entrySet()) { + newFeatures.compute(adds.getKey(), (k, v) -> v == null ? new HashSet<>() : v instanceof HashSet ? v : new HashSet<>(v)) + .addAll(adds.getValue()); + } + + return new ClusterFeatures(newFeatures); + } + } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + return Iterators.concat( + Iterators.single((builder, p) -> builder.startArray()), + nodeFeatures.entrySet().stream().sorted(Map.Entry.comparingByKey()).map(e -> (builder, p) -> { + String[] features = e.getValue().toArray(String[]::new); + Arrays.sort(features); + return builder.startObject().field("node_id", e.getKey()).array("features", features).endObject(); + }).iterator(), + Iterators.single((builder, p) -> builder.endArray()) + ); + } + + @Override + public String toString() { + // sort for ease of debugging + var features = new TreeMap<>(nodeFeatures); + features.replaceAll((k, v) -> new TreeSet<>(v)); + return "ClusterFeatures" + features; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof ClusterFeatures == false) return false; + if (this == obj) return true; + + ClusterFeatures that = (ClusterFeatures) obj; + return nodeFeatures.equals(that.nodeFeatures); + } + + @Override + public int hashCode() { + return Objects.hash(nodeFeatures); + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index aa1fba7aecc81..0e72aa545e427 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContent; @@ -57,6 +58,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.TreeSet; import java.util.concurrent.Executor; import java.util.function.Consumer; import java.util.function.Function; @@ -168,6 
+170,8 @@ public CompatibilityVersions read(StreamInput in, String key) throws IOException private final Map compatibilityVersions; private final CompatibilityVersions minVersions; + private final ClusterFeatures clusterFeatures; + private final Metadata metadata; private final ClusterBlocks blocks; @@ -190,6 +194,7 @@ public ClusterState(long version, String stateUUID, ClusterState state) { state.routingTable(), state.nodes(), state.compatibilityVersions, + state.clusterFeatures(), state.blocks(), state.customs(), false, @@ -205,6 +210,7 @@ public ClusterState( RoutingTable routingTable, DiscoveryNodes nodes, Map compatibilityVersions, + ClusterFeatures clusterFeatures, ClusterBlocks blocks, Map customs, boolean wasReadFromDiff, @@ -217,6 +223,7 @@ public ClusterState( this.routingTable = routingTable; this.nodes = nodes; this.compatibilityVersions = Map.copyOf(compatibilityVersions); + this.clusterFeatures = clusterFeatures; this.blocks = blocks; this.customs = customs; this.wasReadFromDiff = wasReadFromDiff; @@ -298,6 +305,10 @@ public Map getMinSystemIndexMappi return this.minVersions.systemIndexMappingsVersion(); } + public ClusterFeatures clusterFeatures() { + return clusterFeatures; + } + public Metadata metadata() { return this.metadata; } @@ -485,11 +496,15 @@ public String toString() { sb.append(blocks()); sb.append(nodes()); if (compatibilityVersions.isEmpty() == false) { - sb.append("node versions:\n"); + sb.append("node versions:\n"); for (var tv : compatibilityVersions.entrySet()) { sb.append(TAB).append(tv.getKey()).append(": ").append(tv.getValue()).append("\n"); } } + sb.append("cluster features:\n"); + for (var nf : getNodeFeatures(clusterFeatures).entrySet()) { + sb.append(TAB).append(nf.getKey()).append(": ").append(new TreeSet<>(nf.getValue())).append("\n"); + } sb.append(routingTable()); sb.append(getRoutingNodes()); if (customs.isEmpty() == false) { @@ -653,6 +668,14 @@ public Iterator toXContentChunked(ToXContent.Params outerP (builder, params) -> builder.endArray() ), + // per-node feature information + metrics.contains(Metric.NODES) + ? Iterators.concat( + Iterators.single((b, p) -> b.field("nodes_features")), + clusterFeatures.toXContentChunked(outerParams) + ) + : Collections.emptyIterator(), + // metadata metrics.contains(Metric.METADATA) ? 
metadata.toXContentChunked(outerParams) : Collections.emptyIterator(), @@ -735,6 +758,11 @@ public ClusterState copyAndUpdateMetadata(Consumer updater) { return copyAndUpdate(builder -> builder.metadata(metadata().copyAndUpdate(updater))); } + @SuppressForbidden(reason = "directly reading ClusterState#clusterFeatures") + private static Map> getNodeFeatures(ClusterFeatures features) { + return features.nodeFeatures(); + } + public static class Builder { private ClusterState previous; @@ -746,6 +774,7 @@ public static class Builder { private RoutingTable routingTable = RoutingTable.EMPTY_ROUTING_TABLE; private DiscoveryNodes nodes = DiscoveryNodes.EMPTY_NODES; private final Map compatibilityVersions; + private final Map> nodeFeatures; private ClusterBlocks blocks = ClusterBlocks.EMPTY_CLUSTER_BLOCK; private final ImmutableOpenMap.Builder customs; private boolean fromDiff; @@ -757,6 +786,7 @@ public Builder(ClusterState state) { this.uuid = state.stateUUID(); this.nodes = state.nodes(); this.compatibilityVersions = new HashMap<>(state.compatibilityVersions); + this.nodeFeatures = new HashMap<>(getNodeFeatures(state.clusterFeatures())); this.routingTable = state.routingTable(); this.metadata = state.metadata(); this.blocks = state.blocks(); @@ -766,6 +796,7 @@ public Builder(ClusterState state) { public Builder(ClusterName clusterName) { this.compatibilityVersions = new HashMap<>(); + this.nodeFeatures = new HashMap<>(); customs = ImmutableOpenMap.builder(); this.clusterName = clusterName; } @@ -823,6 +854,22 @@ public Map compatibilityVersions() { return Collections.unmodifiableMap(this.compatibilityVersions); } + public Builder nodeFeatures(ClusterFeatures features) { + this.nodeFeatures.clear(); + this.nodeFeatures.putAll(getNodeFeatures(features)); + return this; + } + + public Builder nodeFeatures(Map> nodeFeatures) { + this.nodeFeatures.clear(); + this.nodeFeatures.putAll(nodeFeatures); + return this; + } + + public Map> nodeFeatures() { + return Collections.unmodifiableMap(this.nodeFeatures); + } + public Builder routingTable(RoutingTable.Builder routingTableBuilder) { return routingTable(routingTableBuilder.build()); } @@ -901,6 +948,15 @@ public ClusterState build() { } else { routingNodes = null; } + + // ensure every node in the cluster has a feature set + // nodes can be null in some tests + if (nodes != null) { + for (DiscoveryNode node : nodes) { + nodeFeatures.putIfAbsent(node.getId(), Set.of()); + } + } + return new ClusterState( clusterName, version, @@ -909,6 +965,7 @@ public ClusterState build() { routingTable, nodes, compatibilityVersions, + new ClusterFeatures(nodeFeatures), blocks, customs.build(), fromDiff, @@ -961,6 +1018,9 @@ public static ClusterState readFrom(StreamInput in, DiscoveryNode localNode) thr .values() .forEach(n -> builder.putCompatibilityVersions(n.getId(), inferTransportVersion(n), Map.of())); } + if (in.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + builder.nodeFeatures(ClusterFeatures.readFrom(in)); + } builder.blocks = ClusterBlocks.readFrom(in); int customSize = in.readVInt(); for (int i = 0; i < customSize; i++) { @@ -1002,6 +1062,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeMap(compatibilityVersions, (streamOutput, versions) -> versions.writeTo(streamOutput)); } + if (out.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + clusterFeatures.writeTo(out); + } blocks.writeTo(out); 
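// Both halves of the CLUSTER_FEATURES_ADDED version gate, collected here for contrast (each
// snippet appears verbatim in this file's writeTo above and readFrom earlier):
//
//     if (out.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) {
//         clusterFeatures.writeTo(out);
//     }
//     if (in.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) {
//         builder.nodeFeatures(ClusterFeatures.readFrom(in));
//     }
//
// An older node never receives bytes it cannot parse; on the diff path the features are left
// null and, per the comment in the diff reader, filled in when nodes re-register with a
// feature-aware master.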
VersionedNamedWriteable.writeVersionedWritables(out, customs); if (out.getTransportVersion().before(TransportVersions.V_8_0_0)) { @@ -1025,6 +1088,7 @@ private static class ClusterStateDiff implements Diff { @Nullable private final Diff> versions; + private final Diff features; private final Diff metadata; @@ -1045,6 +1109,7 @@ private static class ClusterStateDiff implements Diff { DiffableUtils.getStringKeySerializer(), COMPATIBILITY_VERSIONS_VALUE_SERIALIZER ); + features = after.clusterFeatures.diff(before.clusterFeatures); metadata = after.metadata.diff(before.metadata); blocks = after.blocks.diff(before.blocks); customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER); @@ -1066,6 +1131,11 @@ private static class ClusterStateDiff implements Diff { } else { versions = null; // infer at application time } + if (in.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + features = ClusterFeatures.readDiffFrom(in); + } else { + features = null; // fill in when nodes re-register with a master that understands features + } metadata = Metadata.readDiffFrom(in); blocks = ClusterBlocks.readDiffFrom(in); customs = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), CUSTOM_VALUE_SERIALIZER); @@ -1085,6 +1155,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { out.writeOptionalWriteable(versions); } + if (out.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + features.writeTo(out); + } metadata.writeTo(out); blocks.writeTo(out); customs.writeTo(out); @@ -1117,6 +1190,9 @@ public ClusterState apply(ClusterState state) { .values() .forEach(n -> builder.putCompatibilityVersions(n.getId(), inferTransportVersion(n), Map.of())); } + if (features != null) { + builder.nodeFeatures(this.features.apply(state.clusterFeatures)); + } builder.metadata(metadata.apply(state.metadata)); builder.blocks(blocks.apply(state.blocks)); builder.customs(customs.apply(state.customs)); diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java index a4fde4993a47e..234c9a924d8a8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java @@ -11,6 +11,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.ClusterState.Custom; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; @@ -321,7 +322,14 @@ public long repositoryStateId() { @Override public String toString() { - return "SnapshotDeletionsInProgress.Entry[[" + uuid + "][" + state + "]" + snapshots + "]"; + return Strings.format( + "SnapshotDeletionsInProgress.Entry[[%s@%d][%s][%s]%s]", + repoName, + repositoryStateId, + uuid, + state, + snapshots + ); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 8df26075c13f7..08e31e11ae256 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -211,7 +211,8 @@ public Coordinator( Reconfigurator reconfigurator, LeaderHeartbeatService leaderHeartbeatService, PreVoteCollector.Factory preVoteCollectorFactory, - CompatibilityVersions compatibilityVersions + CompatibilityVersions compatibilityVersions, + Set features ) { this.settings = settings; this.transportService = transportService; @@ -236,7 +237,8 @@ public Coordinator( circuitBreakerService, reconfigurator::maybeReconfigureAfterNewMasterIsElected, this::getLatestStoredStateAfterWinningAnElection, - compatibilityVersions + compatibilityVersions, + features ); this.joinValidationService = new JoinValidationService( settings, @@ -776,7 +778,12 @@ private void processJoinRequest(JoinRequest joinRequest, ActionListener jo && optionalJoin.stream().allMatch(j -> j.getTerm() <= getCurrentTerm()); optionalJoin.ifPresent(this::handleJoin); - joinAccumulator.handleJoinRequest(joinRequest.getSourceNode(), joinRequest.getCompatibilityVersions(), joinListener); + joinAccumulator.handleJoinRequest( + joinRequest.getSourceNode(), + joinRequest.getCompatibilityVersions(), + joinRequest.getFeatures(), + joinListener + ); if (prevElectionWon == false && coordState.electionWon()) { becomeLeader(); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 247034c88ed62..e5dee6aeb67e2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -51,6 +51,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; @@ -78,6 +79,7 @@ public class JoinHelper { private final CircuitBreakerService circuitBreakerService; private final ObjLongConsumer> latestStoredStateSupplier; private final CompatibilityVersions compatibilityVersions; + private final Set features; private final Map, PendingJoinInfo> pendingOutgoingJoins = ConcurrentCollections.newConcurrentMap(); private final AtomicReference lastFailedJoinAttempt = new AtomicReference<>(); @@ -97,7 +99,8 @@ public class JoinHelper { CircuitBreakerService circuitBreakerService, Function maybeReconfigureAfterMasterElection, ObjLongConsumer> latestStoredStateSupplier, - CompatibilityVersions compatibilityVersions + CompatibilityVersions compatibilityVersions, + Set features ) { this.joinTaskQueue = masterService.createTaskQueue( "node-join", @@ -112,6 +115,7 @@ public class JoinHelper { this.joinReasonService = joinReasonService; this.latestStoredStateSupplier = latestStoredStateSupplier; this.compatibilityVersions = compatibilityVersions; + this.features = features; transportService.registerRequestHandler( JOIN_ACTION_NAME, @@ -239,7 +243,13 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo()); return; } - final JoinRequest joinRequest = new JoinRequest(transportService.getLocalNode(), compatibilityVersions, term, optionalJoin); + final JoinRequest joinRequest = new JoinRequest( + transportService.getLocalNode(), + compatibilityVersions, + features, + term, + optionalJoin + ); final Tuple dedupKey = Tuple.tuple(destination, joinRequest); final var pendingJoinInfo = new 
PendingJoinInfo(transportService.getThreadPool().relativeTimeInMillis()); if (pendingOutgoingJoins.putIfAbsent(dedupKey, pendingJoinInfo) == null) { @@ -405,7 +415,12 @@ List getInFlightJoinStatuses() { } interface JoinAccumulator { - void handleJoinRequest(DiscoveryNode sender, CompatibilityVersions compatibilityVersions, ActionListener joinListener); + void handleJoinRequest( + DiscoveryNode sender, + CompatibilityVersions compatibilityVersions, + Set features, + ActionListener joinListener + ); default void close(Mode newMode) {} } @@ -415,11 +430,13 @@ class LeaderJoinAccumulator implements JoinAccumulator { public void handleJoinRequest( DiscoveryNode sender, CompatibilityVersions compatibilityVersions, + Set features, ActionListener joinListener ) { final JoinTask task = JoinTask.singleNode( sender, compatibilityVersions, + features, joinReasonService.getJoinReason(sender, Mode.LEADER), joinListener, currentTermSupplier.getAsLong() @@ -438,6 +455,7 @@ static class InitialJoinAccumulator implements JoinAccumulator { public void handleJoinRequest( DiscoveryNode sender, CompatibilityVersions compatibilityVersions, + Set features, ActionListener joinListener ) { assert false : "unexpected join from " + sender + " during initialisation"; @@ -455,6 +473,7 @@ static class FollowerJoinAccumulator implements JoinAccumulator { public void handleJoinRequest( DiscoveryNode sender, CompatibilityVersions compatibilityVersions, + Set features, ActionListener joinListener ) { joinListener.onFailure(new CoordinationStateRejectedException("join target is a follower")); @@ -468,19 +487,22 @@ public String toString() { class CandidateJoinAccumulator implements JoinAccumulator { - private final Map>> joinRequestAccumulator = new HashMap<>(); + private record JoinInformation(CompatibilityVersions compatibilityVersions, Set features, ActionListener listener) {} + + private final Map joinRequestAccumulator = new HashMap<>(); boolean closed; @Override public void handleJoinRequest( DiscoveryNode sender, CompatibilityVersions compatibilityVersions, + Set features, ActionListener joinListener ) { assert closed == false : "CandidateJoinAccumulator closed"; - var prev = joinRequestAccumulator.put(sender, Tuple.tuple(compatibilityVersions, joinListener)); + var prev = joinRequestAccumulator.put(sender, new JoinInformation(compatibilityVersions, features, joinListener)); if (prev != null) { - prev.v2().onFailure(new CoordinationStateRejectedException("received a newer join from " + sender)); + prev.listener().onFailure(new CoordinationStateRejectedException("received a newer join from " + sender)); } } @@ -495,9 +517,10 @@ public void close(Mode newMode) { final var data = entry.getValue(); return new JoinTask.NodeJoinTask( discoveryNode, - data.v1(), + data.compatibilityVersions(), + data.features(), joinReasonService.getJoinReason(discoveryNode, Mode.CANDIDATE), - data.v2() + data.listener() ); }), joiningTerm); latestStoredStateSupplier.accept(new ActionListener<>() { @@ -516,13 +539,13 @@ public void onFailure(Exception e) { Strings.format("failed to retrieve latest stored state after winning election in term [%d]", joiningTerm), e ); - joinRequestAccumulator.values().forEach(joinCallback -> joinCallback.v2().onFailure(e)); + joinRequestAccumulator.values().forEach(joinCallback -> joinCallback.listener().onFailure(e)); } }, joiningTerm); } else { assert newMode == Mode.FOLLOWER : newMode; joinRequestAccumulator.values() - .forEach(joinCallback -> joinCallback.v2().onFailure(new 
CoordinationStateRejectedException("became follower"))); + .forEach(joinCallback -> joinCallback.listener().onFailure(new CoordinationStateRejectedException("became follower"))); } // CandidateJoinAccumulator is only closed when becoming leader or follower, otherwise it accumulates all joins received diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java index d8958f75c7aa5..2ba65873738a0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java @@ -19,6 +19,7 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; public class JoinRequest extends TransportRequest { @@ -32,6 +33,11 @@ public class JoinRequest extends TransportRequest { */ private final CompatibilityVersions compatibilityVersions; + /** + * The features that are supported by the joining node. + */ + private final Set features; + /** * The minimum term for which the joining node will accept any cluster state publications. If the joining node is in a strictly greater * term than the master it wants to join then the master must enter a new term and hold another election. Doesn't necessarily match @@ -50,12 +56,14 @@ public class JoinRequest extends TransportRequest { public JoinRequest( DiscoveryNode sourceNode, CompatibilityVersions compatibilityVersions, + Set features, long minimumTerm, Optional optionalJoin ) { assert optionalJoin.isPresent() == false || optionalJoin.get().getSourceNode().equals(sourceNode); this.sourceNode = sourceNode; this.compatibilityVersions = compatibilityVersions; + this.features = features; this.minimumTerm = minimumTerm; this.optionalJoin = optionalJoin; } @@ -70,6 +78,11 @@ public JoinRequest(StreamInput in) throws IOException { // no known mapping versions here compatibilityVersions = new CompatibilityVersions(TransportVersion.fromId(sourceNode.getVersion().id), Map.of()); } + if (in.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + features = in.readCollectionAsSet(StreamInput::readString); + } else { + features = Set.of(); + } minimumTerm = in.readLong(); optionalJoin = Optional.ofNullable(in.readOptionalWriteable(Join::new)); } @@ -81,6 +94,9 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0)) { compatibilityVersions.writeTo(out); } + if (out.getTransportVersion().onOrAfter(TransportVersions.CLUSTER_FEATURES_ADDED)) { + out.writeCollection(features, StreamOutput::writeString); + } out.writeLong(minimumTerm); out.writeOptionalWriteable(optionalJoin.orElse(null)); } @@ -93,6 +109,10 @@ public CompatibilityVersions getCompatibilityVersions() { return compatibilityVersions; } + public Set getFeatures() { + return features; + } + public long getMinimumTerm() { return minimumTerm; } @@ -118,12 +138,13 @@ public boolean equals(Object o) { if (minimumTerm != that.minimumTerm) return false; if (sourceNode.equals(that.sourceNode) == false) return false; if (compatibilityVersions.equals(that.compatibilityVersions) == false) return false; + if (features.equals(that.features) == false) return false; return optionalJoin.equals(that.optionalJoin); } @Override public int hashCode() { - return Objects.hash(sourceNode, compatibilityVersions, minimumTerm, optionalJoin); + return Objects.hash(sourceNode, compatibilityVersions, features, 
minimumTerm, optionalJoin); } @Override @@ -133,6 +154,8 @@ public String toString() { + sourceNode + ", compatibilityVersions=" + compatibilityVersions + + ", features=" + + features + ", minimumTerm=" + minimumTerm + ", optionalJoin=" diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java index ac1c4e888e6ca..198912646cd65 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java @@ -17,6 +17,7 @@ import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.stream.Stream; public record JoinTask(List<NodeJoinTask> nodeJoinTasks, boolean isBecomingMaster, long term, ClusterState initialState) @@ -26,11 +27,12 @@ public record JoinTask(List<NodeJoinTask> nodeJoinTasks, boolean isBecomingMaste public static JoinTask singleNode( DiscoveryNode node, CompatibilityVersions compatibilityVersions, + Set<String> features, JoinReason reason, ActionListener<Void> listener, long term ) { - return new JoinTask(List.of(new NodeJoinTask(node, compatibilityVersions, reason, listener)), false, term, null); + return new JoinTask(List.of(new NodeJoinTask(node, compatibilityVersions, features, reason, listener)), false, term, null); } public static JoinTask completingElection(Stream<NodeJoinTask> nodeJoinTaskStream, long term) { @@ -78,6 +80,7 @@ public JoinTask alsoRefreshState(ClusterState latestState) { public record NodeJoinTask( DiscoveryNode node, CompatibilityVersions compatibilityVersions, + Set<String> features, JoinReason reason, ActionListener<Void> listener ) { @@ -85,11 +88,13 @@ public record NodeJoinTask( public NodeJoinTask( DiscoveryNode node, CompatibilityVersions compatibilityVersions, + Set<String> features, JoinReason reason, ActionListener<Void> listener ) { this.node = Objects.requireNonNull(node); this.compatibilityVersions = Objects.requireNonNull(compatibilityVersions); + this.features = Objects.requireNonNull(features); this.reason = reason; this.listener = listener; } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java index 170648452d141..00086c42ed4ae 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeJoinExecutor.java @@ -25,8 +25,8 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import java.util.ArrayList; @@ -122,6 +122,7 @@ public ClusterState execute(BatchExecutionContext<JoinTask> batchExecutionContex DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(newState.nodes()); Map<String, CompatibilityVersions> compatibilityVersionsMap = new HashMap<>(newState.compatibilityVersions()); + Map<String, Set<String>> nodeFeatures = new HashMap<>(newState.nodeFeatures()); assert nodesBuilder.isLocalNodeElectedMaster(); @@ -138,12 +139,23 @@ public ClusterState execute(BatchExecutionContext<JoinTask> batchExecutionContex final DiscoveryNode node = nodeJoinTask.node(); if (currentNodes.nodeExistsWithSameRoles(node)) { logger.debug("received a join request for an existing node [{}]", node); + + // update the node's
feature set if it has one + // this can happen if the master has just moved from a pre-features version to a post-features version + assert Version.V_8_12_0.onOrBefore(Version.CURRENT) : "This can be removed once 8.12.0 is no longer a valid version"; + if (Objects.equals(nodeFeatures.get(node.getId()), nodeJoinTask.features()) == false) { + logger.debug("updating node [{}] features {}", node.getId(), nodeJoinTask.features()); + nodeFeatures.put(node.getId(), nodeJoinTask.features()); + nodesChanged = true; + } } else { try { CompatibilityVersions compatibilityVersions = nodeJoinTask.compatibilityVersions(); + Set<String> features = nodeJoinTask.features(); if (enforceVersionBarrier) { ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); CompatibilityVersions.ensureVersionsCompatibility(compatibilityVersions, compatibilityVersionsMap.values()); + // TODO: enforce feature ratchet barrier } blockForbiddenVersions(compatibilityVersions.transportVersion()); ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); @@ -152,6 +164,7 @@ public ClusterState execute(BatchExecutionContext<JoinTask> batchExecutionContex ensureIndexCompatibility(node.getMinIndexVersion(), node.getMaxIndexVersion(), initialState.getMetadata()); nodesBuilder.add(node); compatibilityVersionsMap.put(node.getId(), compatibilityVersions); + nodeFeatures.put(node.getId(), features); nodesChanged = true; minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); @@ -222,7 +235,7 @@ public ClusterState execute(BatchExecutionContext<JoinTask> batchExecutionContex } final ClusterState clusterStateWithNewNodesAndDesiredNodes = DesiredNodes.updateDesiredNodesStatusIfNeeded( - newState.nodes(nodesBuilder).nodeIdsToCompatibilityVersions(compatibilityVersionsMap).build() + newState.nodes(nodesBuilder).nodeIdsToCompatibilityVersions(compatibilityVersionsMap).nodeFeatures(nodeFeatures).build() ); final ClusterState updatedState = allocationService.adaptAutoExpandReplicas(clusterStateWithNewNodesAndDesiredNodes); assert enforceVersionBarrier == false @@ -240,11 +253,6 @@ public ClusterState execute(BatchExecutionContext<JoinTask> batchExecutionContex } } - @SuppressForbidden(reason = "maintaining ClusterState#compatibilityVersions requires reading them") - private static Map<String, CompatibilityVersions> getCompatibilityVersions(ClusterState clusterState) { - return clusterState.compatibilityVersions(); - } - protected ClusterState.Builder becomeMasterAndTrimConflictingNodes( ClusterState currentState, List<? extends TaskContext<JoinTask>> taskContexts, @@ -264,9 +272,13 @@ protected ClusterState.Builder becomeMasterAndTrimConflictingNodes( assert currentState.nodes().getMasterNodeId() == null : currentState; assert currentState.term() < term : term + " vs " + currentState; - DiscoveryNodes currentNodes = currentState.nodes(); + + ClusterState.Builder builder = ClusterState.builder(currentState); + + DiscoveryNodes currentNodes = builder.nodes(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes); - Map<String, CompatibilityVersions> compatibilityVersions = new HashMap<>(getCompatibilityVersions(currentState)); + Map<String, CompatibilityVersions> compatibilityVersions = new HashMap<>(builder.compatibilityVersions()); + Map<String, Set<String>> nodeFeatures = new HashMap<>(builder.nodeFeatures()); nodesBuilder.masterNodeId(currentState.nodes().getLocalNodeId()); nodesBuilder.resetNodeLeftGeneration(); @@ -277,6 +289,7 @@ protected ClusterState.Builder becomeMasterAndTrimConflictingNodes( logger.debug("removing existing node [{}], which conflicts with
incoming join from [{}]", nodeWithSameId, joiningNode); nodesBuilder.remove(nodeWithSameId.getId()); compatibilityVersions.remove(nodeWithSameId.getId()); + nodeFeatures.remove(nodeWithSameId.getId()); } final DiscoveryNode nodeWithSameAddress = currentNodes.findByAddress(joiningNode.getAddress()); if (nodeWithSameAddress != null && nodeWithSameAddress.equals(joiningNode) == false) { @@ -287,15 +300,16 @@ protected ClusterState.Builder becomeMasterAndTrimConflictingNodes( ); nodesBuilder.remove(nodeWithSameAddress.getId()); compatibilityVersions.remove(nodeWithSameAddress.getId()); + nodeFeatures.remove(nodeWithSameAddress.getId()); } } } // now trim any left over dead nodes - either left there when the previous master stepped down // or removed by us above - ClusterState tmpState = ClusterState.builder(currentState) - .nodes(nodesBuilder) + ClusterState tmpState = builder.nodes(nodesBuilder) .nodeIdsToCompatibilityVersions(compatibilityVersions) + .nodeFeatures(nodeFeatures) .blocks(ClusterBlocks.builder().blocks(currentState.blocks()).removeGlobalBlock(NoMasterBlockService.NO_MASTER_BLOCK_ID)) .metadata( Metadata.builder(currentState.metadata()) @@ -328,7 +342,7 @@ private static void blockForbiddenVersions(TransportVersion joiningTransportVers * Ensures that all indices are compatible with the given index version. This will ensure that all indices in the given metadata * will not be created with a newer version of elasticsearch as well as that all indices are newer or equal to the minimum index * compatibility version. - * @see IndexVersion#MINIMUM_COMPATIBLE + * @see IndexVersions#MINIMUM_COMPATIBLE * @throws IllegalStateException if any index is incompatible with the given version */ public static void ensureIndexCompatibility(IndexVersion minSupportedVersion, IndexVersion maxSupportedVersion, Metadata metadata) { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeLeftExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeLeftExecutor.java index 39230d0255ae7..5cbe742aec628 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeLeftExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeLeftExecutor.java @@ -18,11 +18,11 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import java.util.HashMap; import java.util.Map; +import java.util.Set; public class NodeLeftExecutor implements ClusterStateTaskExecutor { @@ -50,16 +50,14 @@ public NodeLeftExecutor(AllocationService allocationService) { this.allocationService = allocationService; } - @SuppressForbidden(reason = "maintaining ClusterState#compatibilityVersions requires reading them") - private static Map getCompatibilityVersions(ClusterState clusterState) { - return clusterState.compatibilityVersions(); - } - @Override public ClusterState execute(BatchExecutionContext batchExecutionContext) throws Exception { ClusterState initialState = batchExecutionContext.initialState(); + + ClusterState.Builder builder = ClusterState.builder(initialState); DiscoveryNodes.Builder remainingNodesBuilder = DiscoveryNodes.builder(initialState.nodes()); - Map compatibilityVersions = new HashMap<>(getCompatibilityVersions(initialState)); + Map compatibilityVersions = new 
HashMap<>(builder.compatibilityVersions()); + Map> nodeFeatures = new HashMap<>(builder.nodeFeatures()); boolean removed = false; for (final var taskContext : batchExecutionContext.taskContexts()) { final var task = taskContext.getTask(); @@ -67,6 +65,7 @@ public ClusterState execute(BatchExecutionContext batchExecutionContext) t if (initialState.nodes().nodeExists(task.node())) { remainingNodesBuilder.remove(task.node()); compatibilityVersions.remove(task.node().getId()); + nodeFeatures.remove(task.node().getId()); removed = true; reason = task.reason(); } else { @@ -89,7 +88,11 @@ public ClusterState execute(BatchExecutionContext batchExecutionContext) t try (var ignored = batchExecutionContext.dropHeadersContext()) { // suppress deprecation warnings e.g. from reroute() - final var remainingNodesClusterState = remainingNodesClusterState(initialState, remainingNodesBuilder, compatibilityVersions); + final var remainingNodesClusterState = builder.nodes(remainingNodesBuilder) + .nodeIdsToCompatibilityVersions(compatibilityVersions) + .nodeFeatures(nodeFeatures) + .build(); + remainingNodesClusterState(remainingNodesClusterState); final var ptasksDisassociatedState = PersistentTasksCustomMetadata.disassociateDeadNodes(remainingNodesClusterState); return allocationService.disassociateDeadNodes( ptasksDisassociatedState, @@ -102,15 +105,5 @@ public ClusterState execute(BatchExecutionContext batchExecutionContext) t // visible for testing // hook is used in testing to ensure that correct cluster state is used to test whether a // rejoin or reroute is needed - protected ClusterState remainingNodesClusterState( - ClusterState currentState, - DiscoveryNodes.Builder remainingNodesBuilder, - Map compatibilityVersions - ) { - return ClusterState.builder(currentState) - .nodes(remainingNodesBuilder) - .nodeIdsToCompatibilityVersions(compatibilityVersions) - .build(); - } - + void remainingNodesClusterState(ClusterState state) {} } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java index 9709e149b28d1..742b52365c8d7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetadata.java @@ -45,6 +45,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -349,7 +350,7 @@ public static APIBlock readFrom(StreamInput input) throws IOException { public static final Setting SETTING_INDEX_VERSION_CREATED = Setting.versionIdSetting( SETTING_VERSION_CREATED, - IndexVersion.ZERO, + IndexVersions.ZERO, IndexVersion::fromId, Property.IndexScope, Property.PrivateIndex @@ -1057,7 +1058,7 @@ public IndexVersion getCreationVersion() { /** * Return the {@link IndexVersion} that this index provides compatibility for. - * This is typically compared to the {@link IndexVersion#MINIMUM_COMPATIBLE} to figure out whether the index can be handled + * This is typically compared to the {@link IndexVersions#MINIMUM_COMPATIBLE} to figure out whether the index can be handled * by the cluster. 
* By default, this is equal to the {@link #getCreationVersion()}, but can also be a newer version if the index has been imported as * a legacy index from an older snapshot, and its metadata has been converted to be handled by newer version nodes. @@ -2190,7 +2191,7 @@ IndexMetadata build(boolean repair) { var aliasesMap = aliases.build(); for (AliasMetadata alias : aliasesMap.values()) { if (alias.alias().equals(index)) { - if (repair && indexCreatedVersion.equals(IndexVersion.V_8_5_0)) { + if (repair && indexCreatedVersion.equals(IndexVersions.V_8_5_0)) { var updatedBuilder = ImmutableOpenMap.builder(aliasesMap); final var brokenAlias = updatedBuilder.remove(index); final var fixedAlias = AliasMetadata.newAliasMetadata(brokenAlias, index + "-alias-corrupted-by-8-5"); @@ -2505,7 +2506,9 @@ public static IndexMetadata fromXContent(XContentParser parser, Map { assert mappingsByHash != null : "no deduplicated mappings given"; if (mappingsByHash.containsKey(parser.text()) == false) { - throw new IllegalArgumentException("mapping with hash [" + parser.text() + "] not found"); + throw new IllegalArgumentException( + "mapping of index [" + builder.index + "] with hash [" + parser.text() + "] not found" + ); } builder.putMapping(mappingsByHash.get(parser.text())); } @@ -2520,7 +2523,7 @@ public static IndexMetadata fromXContent(XContentParser parser, Map map */ private static IndexVersion indexCreatedVersion(Settings indexSettings) { IndexVersion indexVersion = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings); - if (indexVersion.equals(IndexVersion.ZERO)) { + if (indexVersion.equals(IndexVersions.ZERO)) { final String message = String.format( Locale.ROOT, "[%s] is not present in the index settings for index with UUID [%s]", @@ -2676,7 +2679,7 @@ private static IndexVersion indexCreatedVersion(Settings indexSettings) { public static Settings addHumanReadableSettings(Settings settings) { Settings.Builder builder = Settings.builder().put(settings); IndexVersion version = SETTING_INDEX_VERSION_CREATED.get(settings); - if (version.equals(IndexVersion.ZERO) == false) { + if (version.equals(IndexVersions.ZERO) == false) { builder.put(SETTING_VERSION_CREATED_STRING, version.toString()); } Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 904dc5eb933b4..b50b1e0a74d93 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.indices.SystemIndices; @@ -63,7 +64,7 @@ public class IndexNameExpressionResolver { public static final String EXCLUDED_DATA_STREAMS_KEY = "es.excluded_ds"; public static final Version SYSTEM_INDEX_ENFORCEMENT_VERSION = Version.V_8_0_0; - public static final IndexVersion SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION = IndexVersion.V_8_0_0; + public static final IndexVersion SYSTEM_INDEX_ENFORCEMENT_INDEX_VERSION = IndexVersions.V_8_0_0; private final ThreadContext threadContext; private final 
SystemIndices systemIndices; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 3302549a1b860..da24f0b9d0dc5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -60,6 +60,7 @@ import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -1088,7 +1089,7 @@ static Settings aggregateIndexSettings( private static void validateSoftDeleteSettings(Settings indexSettings) { if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexSettings) == false - && IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(IndexVersion.V_8_0_0)) { + && IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "Creating indices with soft-deletes disabled is no longer supported. " + "Please do not specify a value for setting [index.soft_deletes.enabled]." @@ -1591,7 +1592,7 @@ static void prepareResizeIndexSettings( * the less default split operations are supported */ public static int calculateNumRoutingShards(int numShards, IndexVersion indexVersionCreated) { - if (indexVersionCreated.onOrAfter(IndexVersion.V_7_0_0)) { + if (indexVersionCreated.onOrAfter(IndexVersions.V_7_0_0)) { // only select this automatically for indices that are created on or after 7.0 this will prevent this new behaviour // until we have a fully upgraded cluster. Additionally it will make integrating testing easier since mixed clusters // will always have the behavior of the min node in the cluster. 
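
The same created-version gate recurs through these validation methods; a minimal sketch of the pattern follows. The setting parameter here is hypothetical, while SETTING_INDEX_VERSION_CREATED and IndexVersions.V_8_0_0 are from this change:

    // Sketch: reject a removed setting only on indices created on or after the cutover version.
    static void validateRemovedSetting(Settings indexSettings, Setting<?> removedSetting) {
        if (IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(IndexVersions.V_8_0_0)
            && removedSetting.exists(indexSettings)) {
            throw new IllegalArgumentException("setting [" + removedSetting.getKey() + "] is no longer supported");
        }
    }
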
@@ -1609,7 +1610,7 @@ public static int calculateNumRoutingShards(int numShards, IndexVersion indexVer } public static void validateTranslogRetentionSettings(Settings indexSettings) { - if (IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(IndexVersion.V_8_0_0) + if (IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(IndexVersions.V_8_0_0) && (IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.exists(indexSettings) || IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.exists(indexSettings))) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java index f02e389157540..93ef161c255f8 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/TemplateUpgradeService.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; @@ -64,7 +63,6 @@ public class TemplateUpgradeService implements ClusterStateListener { private Map lastTemplateMetadata; - @SuppressWarnings("this-escape") public TemplateUpgradeService( Client client, ClusterService clusterService, @@ -81,9 +79,6 @@ public TemplateUpgradeService( } return upgradedTemplates; }; - if (DiscoveryNode.isMasterNode(clusterService.getSettings())) { - clusterService.addListener(this); - } } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 554bdce72a2ff..43f117acbd9fe 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.util.StringLiteralDeduplicator; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.node.Node; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; @@ -31,6 +32,7 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.OptionalInt; import java.util.Set; import java.util.SortedSet; import java.util.TreeSet; @@ -295,7 +297,7 @@ private static VersionInformation inferVersionInformation(Version version) { IndexVersion.fromId(version.id) ); } else { - return new VersionInformation(version, IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()); + return new VersionInformation(version, IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()); } } @@ -487,6 +489,16 @@ public Version getVersion() { return this.versionInfo.nodeVersion(); } + public OptionalInt getPre811VersionId() { + // Even if Version is removed from this class completely it will need to read the version ID + // off the wire for old node versions, so the value of this variable can be obtained from that + int versionId = versionInfo.nodeVersion().id; + if (versionId >= Version.V_8_11_0.id) { + return OptionalInt.empty(); + } + return OptionalInt.of(versionId); 
+ } + public IndexVersion getMinIndexVersion() { return versionInfo.minIndexVersion(); } diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index 3ee28437ff81c..cd2c927d87f69 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -21,6 +21,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.io.IOException; import java.util.ArrayList; @@ -881,7 +882,7 @@ public DiscoveryNodes build() { Objects.requireNonNullElse(maxNodeVersion, Version.CURRENT), Objects.requireNonNullElse(minNodeVersion, Version.CURRENT.minimumCompatibilityVersion()), Objects.requireNonNullElse(maxDataNodeCompatibleIndexVersion, IndexVersion.current()), - Objects.requireNonNullElse(minSupportedIndexVersion, IndexVersion.MINIMUM_COMPATIBLE), + Objects.requireNonNullElse(minSupportedIndexVersion, IndexVersions.MINIMUM_COMPATIBLE), computeTiersToNodesMap(dataNodes) ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java b/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java index a2e2e801db958..7cb140ee42c03 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/VersionInformation.java @@ -10,6 +10,7 @@ import org.elasticsearch.Version; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.util.Objects; @@ -23,7 +24,7 @@ public record VersionInformation(Version nodeVersion, IndexVersion minIndexVersi public static final VersionInformation CURRENT = new VersionInformation( Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java index 15710da073c8e..0c5f547d1cb10 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorService.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.HealthIndicatorDetails; @@ -44,6 +45,7 @@ import org.elasticsearch.health.SimpleHealthIndicatorDetails; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import java.util.ArrayList; @@ -132,6 +134,8 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources } } } + + status.updateSearchableSnapshotsOfAvailableIndices(); return createIndicator( status.getStatus(), status.getSymptom(), @@ -143,6 +147,7 @@ public HealthIndicatorResult calculate(boolean verbose, 
int maxAffectedResources // Impact IDs public static final String PRIMARY_UNASSIGNED_IMPACT_ID = "primary_unassigned"; + public static final String READ_ONLY_PRIMARY_UNASSIGNED_IMPACT_ID = "read_only_primary_unassigned"; public static final String REPLICA_UNASSIGNED_IMPACT_ID = "replica_unassigned"; public static final String RESTORE_FROM_SNAPSHOT_ACTION_GUIDE = "https://ela.st/restore-snapshot"; @@ -391,7 +396,6 @@ public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResources ); private class ShardAllocationCounts { - private boolean available = true; // This will be true even if no replicas are expected, as long as none are unavailable private int unassigned = 0; private int unassigned_new = 0; private int unassigned_restarting = 0; @@ -399,14 +403,22 @@ private class ShardAllocationCounts { private int started = 0; private int relocating = 0; private final Set indicesWithUnavailableShards = new HashSet<>(); + // We keep the searchable snapshots separately as long as the original index is still available + // This is checked during the post-processing + private final SearchableSnapshotsState searchableSnapshotsState = new SearchableSnapshotsState(); private final Map> diagnosisDefinitions = new HashMap<>(); public void increment(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { boolean isNew = isUnassignedDueToNewInitialization(routing, state); boolean isRestarting = isUnassignedDueToTimelyRestart(routing, shutdowns); - available &= routing.active() || isRestarting || isNew; if ((routing.active() || isRestarting || isNew) == false) { - indicesWithUnavailableShards.add(routing.getIndexName()); + String indexName = routing.getIndexName(); + Settings indexSettings = state.getMetadata().index(indexName).getSettings(); + if (SearchableSnapshotsSettings.isSearchableSnapshotStore(indexSettings)) { + searchableSnapshotsState.addSearchableSnapshotWithUnavailableShard(indexName); + } else { + indicesWithUnavailableShards.add(indexName); + } } switch (routing.state()) { @@ -435,6 +447,10 @@ public void increment(ShardRouting routing, ClusterState state, NodesShutdownMet } } + public boolean areAllAvailable() { + return indicesWithUnavailableShards.isEmpty(); + } + private void addDefinition(Diagnosis.Definition diagnosisDefinition, String indexName) { diagnosisDefinitions.computeIfAbsent(diagnosisDefinition, (k) -> new HashSet<>()).add(indexName); } @@ -797,18 +813,26 @@ class ShardAllocationStatus { this.clusterMetadata = clusterMetadata; } - public void addPrimary(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { + void addPrimary(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { primaries.increment(routing, state, shutdowns, verbose); } - public void addReplica(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { + void addReplica(ShardRouting routing, ClusterState state, NodesShutdownMetadata shutdowns, boolean verbose) { replicas.increment(routing, state, shutdowns, verbose); } + void updateSearchableSnapshotsOfAvailableIndices() { + // Searchable snapshots do not have replicas, so this post-processing is not applicable for the replicas + primaries.searchableSnapshotsState.updateSearchableSnapshotWithAvailableIndices( + clusterMetadata, + primaries.indicesWithUnavailableShards + ); + } + public HealthStatus getStatus() { - if (primaries.available == false) { + if (primaries.areAllAvailable() == false || 
primaries.searchableSnapshotsState.getRedSearchableSnapshots().isEmpty() == false) { return RED; - } else if (replicas.available == false) { + } else if (replicas.areAllAvailable() == false) { return YELLOW; } else { return GREEN; @@ -840,6 +864,18 @@ public String getSymptom() { } else { builder.append("all shards available."); } + if (primaries.areAllAvailable() + && primaries.searchableSnapshotsState.searchableSnapshotWithOriginalIndexAvailable.isEmpty() == false) { + if (primaries.unassigned == 1) { + builder.append( + " This is a mounted shard and the original shard is available, so there are no data availability problems." + ); + } else { + builder.append( + " These are mounted shards and the original shards are available, so there are no data availability problems." + ); + } + } return builder.toString(); } @@ -902,6 +938,25 @@ public List getImpacts() { ) ); } + Set readOnlyIndicesWithUnavailableShards = primaries.searchableSnapshotsState.getRedSearchableSnapshots(); + if (readOnlyIndicesWithUnavailableShards.isEmpty() == false) { + String impactDescription = String.format( + Locale.ROOT, + "Searching %d %s [%s] might return incomplete results.", + readOnlyIndicesWithUnavailableShards.size(), + readOnlyIndicesWithUnavailableShards.size() == 1 ? "index" : "indices", + getTruncatedIndices(readOnlyIndicesWithUnavailableShards, clusterMetadata) + ); + impacts.add( + new HealthIndicatorImpact( + NAME, + READ_ONLY_PRIMARY_UNASSIGNED_IMPACT_ID, + 1, + impactDescription, + List.of(ImpactArea.SEARCH) + ) + ); + } /* * It is possible that we're working with an intermediate cluster state, and that for an index we have no primary but a replica * that is reported as unavailable. That replica is likely being promoted to primary. The only impact that matters at this @@ -1048,4 +1103,36 @@ static List getRestoreFromSnapshotAffectedResources( return affectedResources; } } + + static class SearchableSnapshotsState { + private final Set searchableSnapshotWithUnavailableShard = new HashSet<>(); + private final Set searchableSnapshotWithOriginalIndexAvailable = new HashSet<>(); + + void addSearchableSnapshotWithUnavailableShard(String indexName) { + searchableSnapshotWithUnavailableShard.add(indexName); + } + + void addSearchableSnapshotWithOriginalIndexAvailable(String indexName) { + searchableSnapshotWithOriginalIndexAvailable.add(indexName); + } + + Set getRedSearchableSnapshots() { + return Sets.difference(searchableSnapshotWithUnavailableShard, searchableSnapshotWithOriginalIndexAvailable); + } + + // If the original index of a searchable snapshot with unavailable shards is available then we remove the searchable snapshot + // from the list of the unavailable searchable snapshots because the data is available via the original index. 
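
The red set computed by getRedSearchableSnapshots above is a plain set difference; a small sketch of the semantics, with hypothetical index names:

    // A mounted index is only red when its shard is unavailable AND its original index is unavailable too.
    Set<String> unavailableMounted = Set.of("restored-logs-1", "restored-logs-2");
    Set<String> originalStillAvailable = Set.of("restored-logs-2"); // its source index still has all shards
    Set<String> red = Sets.difference(unavailableMounted, originalStillAvailable); // -> only "restored-logs-1"
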
+ void updateSearchableSnapshotWithAvailableIndices(Metadata clusterMetadata, Set indicesWithUnavailableShards) { + for (String index : searchableSnapshotWithUnavailableShard) { + assert clusterMetadata.index(index) != null : "Index metadata of index '" + index + "' should not be null"; + Settings indexSettings = clusterMetadata.index(index).getSettings(); + String originalIndex = indexSettings.get(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY); + if (originalIndex != null + && clusterMetadata.indices().containsKey(originalIndex) != false + && indicesWithUnavailableShards.contains(originalIndex) == false) { + addSearchableSnapshotWithOriginalIndexAvailable(index); + } + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java index 279c774127e04..625591ba8b90b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceReconciler.java @@ -28,7 +28,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.gateway.PriorityComparator; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.threadpool.ThreadPool; @@ -145,7 +145,7 @@ private boolean allocateUnassignedInvariant() { final var shardCounts = allocation.metadata().stream().filter(indexMetadata -> // skip any pre-7.2 closed indices which have no routing table entries at all - indexMetadata.getCreationVersion().onOrAfter(IndexVersion.V_7_2_0) + indexMetadata.getCreationVersion().onOrAfter(IndexVersions.V_7_2_0) || indexMetadata.getState() == IndexMetadata.State.OPEN || MetadataIndexStateService.isIndexVerifiedBeforeClosed(indexMetadata)) .flatMap( diff --git a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java index cc6c9a467c2fe..568f2968c9e61 100644 --- a/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java +++ b/server/src/main/java/org/elasticsearch/common/blobstore/OperationPurpose.java @@ -16,6 +16,7 @@ */ public enum OperationPurpose { SNAPSHOT("Snapshot"), + REPOSITORY_ANALYSIS("RepositoryAnalysis"), CLUSTER_STATE("ClusterState"), INDICES("Indices"), TRANSLOG("Translog"); diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index 39d5d90cc6c04..ceff7bf41c587 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -581,5 +581,19 @@ public static double quantizeLat(double lat) { return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat)); } + /** + * Transforms the provided longitude to the previous longitude in lucene quantize space. + */ + public static double quantizeLonDown(double lon) { + return GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon) - 1); + } + + /** + * Transforms the provided latitude to the next latitude in lucene quantize space. 
+ */ + public static double quantizeLatUp(double lat) { + return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat) + 1); + } + private GeoUtils() {} } diff --git a/server/src/main/java/org/elasticsearch/common/hash/Murmur3Hasher.java b/server/src/main/java/org/elasticsearch/common/hash/Murmur3Hasher.java index 0d4d6fd4f61f2..b85e3107ba09d 100644 --- a/server/src/main/java/org/elasticsearch/common/hash/Murmur3Hasher.java +++ b/server/src/main/java/org/elasticsearch/common/hash/Murmur3Hasher.java @@ -8,9 +8,6 @@ package org.elasticsearch.common.hash; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Numbers; - /** * Wraps {@link MurmurHash3} to provide an interface similar to {@link java.security.MessageDigest} that * allows hashing of byte arrays passed through multiple calls to {@link #update(byte[])}. Like @@ -35,38 +32,45 @@ public Murmur3Hasher(long seed) { /** * Supplies some or all of the bytes to be hashed. Multiple calls to this method may - * be made to sequentially supply the bytes for hashing. Once all bytes have been supplied, the - * {@link #digest()} method should be called to complete the hash calculation. + * be made to sequentially supply the bytes for hashing. Once all bytes have been supplied, either the + * {@link #digestHash} method (preferred) or the {@link #digest()} method should be called to complete the hash calculation. */ public void update(byte[] inputBytes) { - int totalLength = remainderLength + inputBytes.length; - if (totalLength >= 16) { - // hash as many bytes as available in integer multiples of 16 - int numBytesToHash = totalLength & 0xFFFFFFF0; - byte[] bytesToHash; + update(inputBytes, 0, inputBytes.length); + } + + private void update(byte[] inputBytes, int offset, int length) { + if (remainderLength + length >= remainder.length) { if (remainderLength > 0) { - bytesToHash = new byte[numBytesToHash]; - System.arraycopy(remainder, 0, bytesToHash, 0, remainderLength); - System.arraycopy(inputBytes, 0, bytesToHash, remainderLength, numBytesToHash - remainderLength); - } else { - bytesToHash = inputBytes; - } + // fill rest of remainder from inputBytes and hash remainder + int bytesToCopyFromInputToRemainder = remainder.length - remainderLength; + System.arraycopy(inputBytes, offset, remainder, remainderLength, bytesToCopyFromInputToRemainder); + offset = bytesToCopyFromInputToRemainder; + length = length - bytesToCopyFromInputToRemainder; - MurmurHash3.IntermediateResult result = MurmurHash3.intermediateHash(bytesToHash, 0, numBytesToHash, h1, h2); - h1 = result.h1; - h2 = result.h2; - this.length += numBytesToHash; + MurmurHash3.IntermediateResult result = MurmurHash3.intermediateHash(remainder, 0, remainder.length, h1, h2); + h1 = result.h1; + h2 = result.h2; + remainderLength = 0; + this.length += remainder.length; + } + // hash as many bytes as available in integer multiples of 16 as intermediateHash can only process multiples of 16 + int numBytesToHash = length & 0xFFFFFFF0; + if (numBytesToHash > 0) { + MurmurHash3.IntermediateResult result = MurmurHash3.intermediateHash(inputBytes, offset, numBytesToHash, h1, h2); + h1 = result.h1; + h2 = result.h2; + this.length += numBytesToHash; + } // save the remaining bytes, if any - if (totalLength > numBytesToHash) { - System.arraycopy(inputBytes, numBytesToHash - remainderLength, remainder, 0, totalLength - numBytesToHash); - remainderLength = totalLength - numBytesToHash; - } else { - remainderLength = 0; + if (length > numBytesToHash) { + this.remainderLength = 
length - numBytesToHash; + System.arraycopy(inputBytes, offset + numBytesToHash, remainder, 0, remainderLength); } } else { - System.arraycopy(inputBytes, 0, remainder, remainderLength, inputBytes.length); - remainderLength += inputBytes.length; + System.arraycopy(inputBytes, 0, remainder, remainderLength, length); + remainderLength += length; } } @@ -81,29 +85,30 @@ public void reset() { } /** - * Completes the hash of all bytes previously passed to {@link #update(byte[])}. + * Completes the hash of all bytes previously passed to {@link #update}. */ public byte[] digest() { - length += remainderLength; - MurmurHash3.Hash128 h = MurmurHash3.finalizeHash(new MurmurHash3.Hash128(), remainder, 0, length, h1, h2); - byte[] hash = new byte[16]; - System.arraycopy(Numbers.longToBytes(h.h1), 0, hash, 0, 8); - System.arraycopy(Numbers.longToBytes(h.h2), 0, hash, 8, 8); - return hash; + return digestHash().getBytes(); } - public static String getAlgorithm() { - return METHOD; + /** + * Completes the hash of all bytes previously passed to {@link #update}. + */ + public MurmurHash3.Hash128 digestHash() { + return digestHash(new MurmurHash3.Hash128()); } /** - * Converts the 128-bit byte array returned by {@link #digest()} to a - * {@link org.elasticsearch.common.hash.MurmurHash3.Hash128} + * Completes the hash of all bytes previously passed to {@link #update}. + * Allows passing in a re-usable {@link org.elasticsearch.common.hash.MurmurHash3.Hash128} instance to avoid allocations. */ - public static MurmurHash3.Hash128 toHash128(byte[] doubleLongBytes) { - MurmurHash3.Hash128 hash128 = new MurmurHash3.Hash128(); - hash128.h1 = Numbers.bytesToLong(new BytesRef(doubleLongBytes, 0, 8)); - hash128.h2 = Numbers.bytesToLong(new BytesRef(doubleLongBytes, 8, 8)); - return hash128; + public MurmurHash3.Hash128 digestHash(MurmurHash3.Hash128 hash) { + length += remainderLength; + MurmurHash3.finalizeHash(hash, remainder, 0, length, h1, h2); + return hash; + } + + public static String getAlgorithm() { + return METHOD; } } diff --git a/server/src/main/java/org/elasticsearch/common/hash/MurmurHash3.java b/server/src/main/java/org/elasticsearch/common/hash/MurmurHash3.java index 903b7a080a6ca..6d6fdbc45ec99 100644 --- a/server/src/main/java/org/elasticsearch/common/hash/MurmurHash3.java +++ b/server/src/main/java/org/elasticsearch/common/hash/MurmurHash3.java @@ -8,7 +8,6 @@ package org.elasticsearch.common.hash; -import org.elasticsearch.common.Numbers; import org.elasticsearch.common.util.ByteUtils; import java.math.BigInteger; @@ -29,6 +28,17 @@ public static class Hash128 { /** higher 64 bits part **/ public long h2; + public byte[] getBytes() { + byte[] hash = new byte[16]; + getBytes(hash, 0); + return hash; + } + + public void getBytes(byte[] bytes, int offset) { + ByteUtils.writeLongBE(h1, bytes, offset); + ByteUtils.writeLongBE(h2, bytes, offset + 8); + } + @Override public boolean equals(Object other) { if (this == other) { @@ -49,8 +59,7 @@ public int hashCode() { @Override public String toString() { byte[] longBytes = new byte[17]; - System.arraycopy(Numbers.longToBytes(h1), 0, longBytes, 1, 8); - System.arraycopy(Numbers.longToBytes(h2), 0, longBytes, 9, 8); + getBytes(longBytes, 1); BigInteger bi = new BigInteger(longBytes); return "0x" + bi.toString(16); } diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 4cd2ff52cf29a..f7c9e72d36326 100644 --- 
a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexingSlowLog; import org.elasticsearch.index.MergePolicyConfig; import org.elasticsearch.index.MergeSchedulerConfig; @@ -254,8 +255,8 @@ protected void validateDeprecatedAndRemovedSettingV7(Settings settings, Setting< // IndexMetadata at hand, in which case the setting version will be empty. We don't want to // error out on those validations, we will check with the creation version present at index // creation time, as well as on index update settings. - if (indexVersion.equals(IndexVersion.ZERO) == false - && (indexVersion.before(IndexVersion.V_7_0_0) || indexVersion.onOrAfter(IndexVersion.V_8_0_0))) { + if (indexVersion.equals(IndexVersions.ZERO) == false + && (indexVersion.before(IndexVersions.V_7_0_0) || indexVersion.onOrAfter(IndexVersions.V_8_0_0))) { throw new IllegalArgumentException("unknown setting [" + setting.getKey() + "]"); } } diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java index b168513baf427..41f44dfbdedbc 100644 --- a/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatter.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.time.Instant; import java.time.ZoneId; @@ -119,7 +120,7 @@ static DateFormatter forPattern(String input, IndexVersion supportedVersion) { List formatters = new ArrayList<>(patterns.length); for (String pattern : patterns) { // make sure we still support camel case for indices created before 8.0 - if (supportedVersion.before(IndexVersion.V_8_0_0)) { + if (supportedVersion.before(IndexVersions.V_8_0_0)) { pattern = LegacyFormatNames.camelCaseToSnakeCase(pattern); } formatters.add(DateFormatters.forPattern(pattern)); diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 6a00077c4330e..fabc10e336368 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -111,7 +111,8 @@ public DiscoveryModule( RerouteService rerouteService, NodeHealthService nodeHealthService, CircuitBreakerService circuitBreakerService, - CompatibilityVersions compatibilityVersions + CompatibilityVersions compatibilityVersions, + Set features ) { final Collection> joinValidators = new ArrayList<>(); final Map> hostProviders = new HashMap<>(); @@ -213,7 +214,8 @@ public DiscoveryModule( reconfigurator, leaderHeartbeatService, preVoteCollectorFactory, - compatibilityVersions + compatibilityVersions, + features ); } else { throw new IllegalArgumentException("Unknown discovery type [" + discoveryType + "]"); diff --git a/server/src/main/java/org/elasticsearch/discovery/HandshakingTransportAddressConnector.java b/server/src/main/java/org/elasticsearch/discovery/HandshakingTransportAddressConnector.java index 89fd114e7852a..209faa7207be1 100644 --- 
a/server/src/main/java/org/elasticsearch/discovery/HandshakingTransportAddressConnector.java +++ b/server/src/main/java/org/elasticsearch/discovery/HandshakingTransportAddressConnector.java @@ -23,6 +23,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TransportRequestOptions.Type; @@ -89,7 +90,7 @@ public void connectToRemoteMasterNode(TransportAddress transportAddress, ActionL emptySet(), new VersionInformation( Version.CURRENT.minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ) ), diff --git a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java index 943cb06e9c6b5..cc685b26ce239 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/elasticsearch/env/NodeEnvironment.java @@ -49,6 +49,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.store.FsDirectoryFactory; @@ -537,7 +538,7 @@ static void checkForIndexCompatibility(Logger logger, DataPath... dataPaths) thr + "] is incompatible. Revert this node to version [" + bestDowngradeVersion + "] and delete any indices with versions earlier than [" - + IndexVersion.MINIMUM_COMPATIBLE + + IndexVersions.MINIMUM_COMPATIBLE + "] before upgrading to version [" + Build.current().version() + "]. If all such indices have already been deleted, revert this node to version [" diff --git a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java index 37dd01733e664..a714ee4cf5ec0 100644 --- a/server/src/main/java/org/elasticsearch/env/NodeMetadata.java +++ b/server/src/main/java/org/elasticsearch/env/NodeMetadata.java @@ -12,6 +12,7 @@ import org.elasticsearch.Version; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -174,7 +175,7 @@ public NodeMetadata build() { previousNodeVersion = nodeVersion; } if (this.oldestIndexVersion == null) { - oldestIndexVersion = IndexVersion.ZERO; + oldestIndexVersion = IndexVersions.ZERO; } else { oldestIndexVersion = this.oldestIndexVersion; } diff --git a/server/src/main/java/org/elasticsearch/features/FeatureService.java b/server/src/main/java/org/elasticsearch/features/FeatureService.java new file mode 100644 index 0000000000000..5d7632a91b0b8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/features/FeatureService.java @@ -0,0 +1,123 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.features; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.Strings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Set; +import java.util.TreeMap; + +/** + * Manages information on the features supported by nodes in the cluster + */ +public class FeatureService { + + /** + * A feature indicating that node features are supported. + */ + public static final NodeFeature FEATURES_SUPPORTED = new NodeFeature("features_supported"); + + private static final Logger logger = LogManager.getLogger(FeatureService.class); + + public static final Version CLUSTER_FEATURES_ADDED_VERSION = Version.V_8_12_0; + + private final NavigableMap<Version, Set<String>> historicalFeatures; + private final Set<String> nodeFeatures; + + public FeatureService(List<? extends FeatureSpecification> specs) { + Map<String, FeatureSpecification> allFeatures = new HashMap<>(); + + NavigableMap<Version, Set<String>> historicalFeatures = new TreeMap<>(); + Set<String> nodeFeatures = new HashSet<>(); + for (FeatureSpecification spec : specs) { + for (var hfe : spec.getHistoricalFeatures().entrySet()) { + FeatureSpecification existing = allFeatures.putIfAbsent(hfe.getKey().id(), spec); + // the same SPI class can be loaded multiple times if it's in the base classloader + if (existing != null && existing.getClass() != spec.getClass()) { + throw new IllegalArgumentException( + Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", hfe.getKey().id(), existing, spec) + ); + } + + if (hfe.getValue().onOrAfter(CLUSTER_FEATURES_ADDED_VERSION)) { + throw new IllegalArgumentException( + Strings.format( + "Historical feature [%s] declared by [%s] for version [%s] is not a historical version", + hfe.getKey().id(), + spec, + hfe.getValue() + ) + ); + } + + historicalFeatures.computeIfAbsent(hfe.getValue(), k -> new HashSet<>()).add(hfe.getKey().id()); + } + + for (NodeFeature f : spec.getFeatures()) { + FeatureSpecification existing = allFeatures.putIfAbsent(f.id(), spec); + if (existing != null && existing.getClass() != spec.getClass()) { + throw new IllegalArgumentException( + Strings.format("Duplicate feature - [%s] is declared by both [%s] and [%s]", f.id(), existing, spec) + ); + } + + nodeFeatures.add(f.id()); + } + } + + this.historicalFeatures = consolidateHistoricalFeatures(historicalFeatures); + this.nodeFeatures = Set.copyOf(nodeFeatures); + + logger.info("Registered local node features {}", nodeFeatures.stream().sorted().toList()); + } + + private static NavigableMap<Version, Set<String>> consolidateHistoricalFeatures( + NavigableMap<Version, Set<String>> declaredHistoricalFeatures + ) { + // update each version by adding in all features from previous versions + Set<String> featureAggregator = new HashSet<>(); + for (Map.Entry<Version, Set<String>> versions : declaredHistoricalFeatures.entrySet()) { + featureAggregator.addAll(versions.getValue()); + versions.setValue(Set.copyOf(featureAggregator)); + } + + return Collections.unmodifiableNavigableMap(declaredHistoricalFeatures); + } + + /** + * The non-historical features supported by this node.
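[Editor's aside: a minimal sketch of the kind of specification this constructor consumes; the plugin class and feature ids below are hypothetical, shown only to illustrate the registration and validation rules above, and are not part of this change.]

// Hypothetical FeatureSpecification, not part of this patch.
public class ExamplePluginFeatures implements FeatureSpecification {

    // A regular feature: published by this node and recorded in nodeFeatures.
    public static final NodeFeature EXAMPLE_FEATURE = new NodeFeature("example_plugin.new_wire_format");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(EXAMPLE_FEATURE);
    }

    // A historical feature: assumed present on all nodes at or above 8.7.0.
    // Declaring it for 8.12.0 or later would trip the
    // "is not a historical version" check in the constructor above.
    @Override
    public Map<NodeFeature, Version> getHistoricalFeatures() {
        return Map.of(new NodeFeature("example_plugin.legacy_behaviour"), Version.V_8_7_0);
    }
}

Constructing new FeatureService(List.of(new ExamplePluginFeatures())) would then register one node feature and one historical-feature cut-off version.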
+ */ + public Set<String> getNodeFeatures() { + return nodeFeatures; + } + + /** + * Returns {@code true} if all nodes in {@code state} support feature {@code feature}. + */ + @SuppressForbidden(reason = "We need basic feature information from cluster state") + public boolean clusterHasFeature(ClusterState state, NodeFeature feature) { + if (state.clusterFeatures().clusterHasFeature(feature)) { + return true; + } + + var features = historicalFeatures.floorEntry(state.getNodes().getMinNodeVersion()); + return features != null && features.getValue().contains(feature.id()); + } +} diff --git a/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java new file mode 100644 index 0000000000000..7df9ac7c4c203 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/features/FeatureSpecification.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.features; + +import org.elasticsearch.Version; + +import java.util.Map; +import java.util.Set; + +/** + * Specifies one or more features that are supported by this node. + */ +public interface FeatureSpecification { + /** + * Returns a set of regular features that this node supports. + */ + default Set<NodeFeature> getFeatures() { + return Set.of(); + } + + /** + * Returns information on historical features that should be added to all nodes at or above the {@link Version} specified. + */ + default Map<NodeFeature, Version> getHistoricalFeatures() { + return Map.of(); + } +} diff --git a/server/src/main/java/org/elasticsearch/features/FeaturesSupportedSpecification.java b/server/src/main/java/org/elasticsearch/features/FeaturesSupportedSpecification.java new file mode 100644 index 0000000000000..3918d64226530 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/features/FeaturesSupportedSpecification.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.features; + +import java.util.Set; + +/** + * This adds a feature {@code features_supported} indicating that a node supports node features. + * Nodes that do not support features won't have this feature in their feature sets, + * so this can be checked without needing to look at the node version. + */ +public class FeaturesSupportedSpecification implements FeatureSpecification { + + @Override + public Set<NodeFeature> getFeatures() { + return Set.of(FeatureService.FEATURES_SUPPORTED); + } +} diff --git a/server/src/main/java/org/elasticsearch/features/NodeFeature.java b/server/src/main/java/org/elasticsearch/features/NodeFeature.java new file mode 100644 index 0000000000000..aef907c952c4a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/features/NodeFeature.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements.
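[Editor's aside: for orientation, a sketch of how calling code is expected to consult clusterHasFeature, reusing the hypothetical ExamplePluginFeatures from the earlier sketch; the method name here is illustrative, not part of the patch.]

// Hypothetical caller: gate new behaviour on cluster-wide feature support.
boolean canUseNewFormat(ClusterState state, FeatureService featureService) {
    // true if every node publishes the feature in cluster state, or, for
    // historical features, if the oldest node version is at or above the
    // version the feature was declared for
    return featureService.clusterHasFeature(state, ExamplePluginFeatures.EXAMPLE_FEATURE);
}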
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.features; + +import java.util.Objects; + +/** + * A feature published by a node. + * + * @param id The feature id. Must be unique in the node. + */ +public record NodeFeature(String id) { + + public NodeFeature { + Objects.requireNonNull(id); + } +} diff --git a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index 686f03830257b..a7cf7299a8502 100644 --- a/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/server/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -35,7 +35,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Tuple; import org.elasticsearch.env.NodeMetadata; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.MetadataUpgrader; @@ -298,7 +298,7 @@ static Metadata upgradeMetadata(Metadata metadata, IndexMetadataVerifier indexMe boolean changed = false; final Metadata.Builder upgradedMetadata = Metadata.builder(metadata); for (IndexMetadata indexMetadata : metadata) { - IndexMetadata newMetadata = indexMetadataVerifier.verifyIndexMetadata(indexMetadata, IndexVersion.MINIMUM_COMPATIBLE); + IndexMetadata newMetadata = indexMetadataVerifier.verifyIndexMetadata(indexMetadata, IndexVersions.MINIMUM_COMPATIBLE); changed |= indexMetadata != newMetadata; upgradedMetadata.put(newMetadata, false); } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index ddc98c1155055..fcf50ba3a8a44 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -71,6 +71,7 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -344,7 +345,7 @@ public record OnDiskStateMetadata( public static NodeMetadata nodeMetadata(Path... dataPaths) throws IOException { String nodeId = null; Version version = null; - IndexVersion oldestIndexVersion = IndexVersion.ZERO; + IndexVersion oldestIndexVersion = IndexVersions.ZERO; for (final Path dataPath : dataPaths) { final Path indexPath = dataPath.resolve(METADATA_DIRECTORY_NAME); if (Files.exists(indexPath)) { @@ -364,7 +365,7 @@ public static NodeMetadata nodeMetadata(Path... 
dataPaths) throws IOException { if (userData.containsKey(OLDEST_INDEX_VERSION_KEY)) { oldestIndexVersion = IndexVersion.fromId(Integer.parseInt(userData.get(OLDEST_INDEX_VERSION_KEY))); } else { - oldestIndexVersion = IndexVersion.ZERO; + oldestIndexVersion = IndexVersions.ZERO; } } } catch (IndexNotFoundException e) { @@ -703,7 +704,7 @@ private static void consumeFromType( if (document.getField(PAGE_FIELD_NAME) == null) { // legacy format: not paginated or compressed - assert IndexVersion.MINIMUM_COMPATIBLE.before(IndexVersion.V_7_16_0); + assert IndexVersions.MINIMUM_COMPATIBLE.before(IndexVersions.V_7_16_0); bytesReferenceConsumer.accept(documentData); continue; } diff --git a/server/src/main/java/org/elasticsearch/index/IndexSettings.java b/server/src/main/java/org/elasticsearch/index/IndexSettings.java index d887ed8d1531d..83a6d9319c75a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -306,7 +306,7 @@ public void validate(final TimeValue value, final Map, Object> settin && fastRefresh == false && value.compareTo(TimeValue.ZERO) > 0 && value.compareTo(STATELESS_MIN_NON_FAST_REFRESH_INTERVAL) < 0 - && indexVersion.after(IndexVersion.V_8_10_0)) { + && indexVersion.after(IndexVersions.V_8_10_0)) { throw new IllegalArgumentException( "index setting [" + IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey() @@ -872,7 +872,7 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti mergeSchedulerConfig = new MergeSchedulerConfig(this); gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); softDeleteEnabled = scopedSettings.get(INDEX_SOFT_DELETES_SETTING); - assert softDeleteEnabled || version.before(IndexVersion.V_8_0_0) : "soft deletes must be enabled in version " + version; + assert softDeleteEnabled || version.before(IndexVersions.V_8_0_0) : "soft deletes must be enabled in version " + version; softDeleteRetentionOperations = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING); retentionLeaseMillis = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING).millis(); warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); diff --git a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java index fd1c0c33eb8a6..98c2e31838379 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java +++ b/server/src/main/java/org/elasticsearch/index/IndexSortConfig.java @@ -224,7 +224,7 @@ public Sort buildIndexSort( throw new IllegalArgumentException(err); } if (Objects.equals(ft.name(), sortSpec.field) == false) { - if (this.indexCreatedVersion.onOrAfter(IndexVersion.V_7_13_0)) { + if (this.indexCreatedVersion.onOrAfter(IndexVersions.V_7_13_0)) { throw new IllegalArgumentException("Cannot use alias [" + sortSpec.field + "] as an index sort field"); } else { DEPRECATION_LOGGER.warn( diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index 0a3b1a1be1f4d..765cc256d84b1 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -12,23 +12,13 @@ import org.elasticsearch.common.VersionId; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Assertions; import 
org.elasticsearch.internal.VersionExtension; import org.elasticsearch.plugins.ExtensionLoader; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.lang.reflect.Field; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.NavigableMap; import java.util.ServiceLoader; -import java.util.Set; -import java.util.TreeMap; -import java.util.TreeSet; /** * The index version. @@ -47,10 +37,10 @@ * resulting in the same index version being used across multiple commits, * causing problems when you try to upgrade between those two merged commits. *
<h2>Version compatibility</h2>
    - * The earliest compatible version is hardcoded in the {@link #MINIMUM_COMPATIBLE} field. Previously, this was dynamically calculated - * from the major/minor versions of {@link Version}, but {@code IndexVersion} does not have separate major/minor version numbers. - * So the minimum compatible version is hard-coded as the index version used by the first version of the previous major release. - * {@link #MINIMUM_COMPATIBLE} should be updated appropriately whenever a major release happens. + * The earliest compatible version is hardcoded in the {@link IndexVersions#MINIMUM_COMPATIBLE} field. Previously, this was dynamically + * calculated from the major/minor versions of {@link Version}, but {@code IndexVersion} does not have separate major/minor version + * numbers. So the minimum compatible version is hard-coded as the index version used by the first version of the previous major release. + * {@link IndexVersions#MINIMUM_COMPATIBLE} should be updated appropriately whenever a major release happens. *
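[Editor's aside: to make those semantics concrete, a sketch of the guard this rule implies; the helper below is hypothetical, not code from this change, and expresses the same predicate as isLegacyIndexVersion().]

// Anything created before IndexVersions.MINIMUM_COMPATIBLE cannot be read by
// this node; it must be reindexed or deleted before upgrading.
static void ensureReadable(IndexVersion created) {
    if (created.before(IndexVersions.MINIMUM_COMPATIBLE)) {
        throw new IllegalStateException(
            "index version [" + created + "] is older than the minimum compatible version [" + IndexVersions.MINIMUM_COMPATIBLE + "]"
        );
    }
}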
<h2>Adding a new version</h2>
* A new index version should be added every time a change is made to the serialization protocol of one or more classes. * Each index version should only be used in a single merged commit (apart from BwC versions copied from {@link Version}). @@ -61,98 +51,8 @@ * If you revert a commit with an index version change, you must ensure there is a new index version * representing the reverted change. Do not let the index version go backwards, it must always be incremented. */ -@SuppressWarnings({"checkstyle:linelength", "deprecation"}) public record IndexVersion(int id, Version luceneVersion) implements VersionId<IndexVersion>, ToXContentFragment { - /* - * NOTE: IntelliJ lies! - * This map is used during class construction, referenced by the registerIndexVersion method. - * When all the index version constants have been registered, the map is cleared & never touched again. - */ - @SuppressWarnings("UnusedAssignment") - static TreeSet<Integer> IDS = new TreeSet<>(); - - private static IndexVersion def(int id, Version luceneVersion) { - if (IDS == null) throw new IllegalStateException("The IDS map needs to be present to call this method"); - - if (IDS.add(id) == false) { - throw new IllegalArgumentException("Version id " + id + " defined twice"); - } - if (id < IDS.last()) { - throw new IllegalArgumentException("Version id " + id + " is not defined in the right location. Keep constants sorted"); - } - return new IndexVersion(id, luceneVersion); - } - - public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); - public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); - public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); - public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); - public static final IndexVersion V_8_3_0 = def(8_03_00_99, Version.LUCENE_9_2_0); - public
static final IndexVersion V_8_4_0 = def(8_04_00_99, Version.LUCENE_9_3_0); - public static final IndexVersion V_8_5_0 = def(8_05_00_99, Version.LUCENE_9_4_1); - public static final IndexVersion V_8_6_0 = def(8_06_00_99, Version.LUCENE_9_4_2); - public static final IndexVersion V_8_7_0 = def(8_07_00_99, Version.LUCENE_9_5_0); - public static final IndexVersion V_8_8_0 = def(8_08_00_99, Version.LUCENE_9_6_0); - public static final IndexVersion V_8_8_2 = def(8_08_02_99, Version.LUCENE_9_6_0); - public static final IndexVersion V_8_9_0 = def(8_09_00_99, Version.LUCENE_9_7_0); - public static final IndexVersion V_8_9_1 = def(8_09_01_99, Version.LUCENE_9_7_0); - public static final IndexVersion V_8_10_0 = def(8_10_00_99, Version.LUCENE_9_7_0); - - /* - * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW INDEX VERSIONS - * Detached index versions added below here. - */ - public static final IndexVersion FIRST_DETACHED_INDEX_VERSION = def(8_500_000, Version.LUCENE_9_7_0); - public static final IndexVersion NEW_SPARSE_VECTOR = def(8_500_001, Version.LUCENE_9_7_0); - public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); - public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); - - /* - * STOP! READ THIS FIRST! No, really, - * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ - * / ___|_ _/ _ \| _ \| | | _ \| ____| / \ | _ \ |_ _| | | |_ _/ ___| | ___|_ _| _ \/ ___|_ _| | - * \___ \ | || | | | |_) | | | |_) | _| / _ \ | | | | | | | |_| || |\___ \ | |_ | || |_) \___ \ | | | | - * ___) || || |_| | __/|_| | _ <| |___ / ___ \| |_| | | | | _ || | ___) | | _| | || _ < ___) || | |_| - * |____/ |_| \___/|_| (_) |_| \_\_____/_/ \_\____/ |_| |_| |_|___|____/ |_| |___|_| \_\____/ |_| (_) - * - * A new index version should be added EVERY TIME a change is made to index metadata or data storage. - * Each index version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, ≤V_8_11_0). - * - * To add a new index version, add a new constant at the bottom of the list, above this comment, which is one greater than the - * current highest version id. Use a descriptive constant name. Don't add other lines, comments, etc. - * - * REVERTING AN INDEX VERSION - * - * If you revert a commit with an index version change, you MUST ensure there is a NEW index version representing the reverted - * change. DO NOT let the index version go backwards, it must ALWAYS be incremented. 
- * - * DETERMINING TRANSPORT VERSIONS FROM GIT HISTORY - * - * TODO after the release of v8.11.0, copy the instructions about using git to track the history of versions from TransportVersion.java - * (the example commands won't make sense until at least 8.11.0 is released) - */ - private static class CurrentHolder { private static final IndexVersion CURRENT = findCurrent(); @@ -160,11 +60,11 @@ private static class CurrentHolder { private static IndexVersion findCurrent() { var versionExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class), () -> null); if (versionExtension == null) { - return LATEST_DEFINED; + return IndexVersions.LATEST_DEFINED; } - var version = versionExtension.getCurrentIndexVersion(LATEST_DEFINED); + var version = versionExtension.getCurrentIndexVersion(IndexVersions.LATEST_DEFINED); - assert version.onOrAfter(LATEST_DEFINED); + assert version.onOrAfter(IndexVersions.LATEST_DEFINED); assert version.luceneVersion.equals(Version.LATEST) : "IndexVersion must be upgraded to [" + Version.LATEST @@ -175,68 +75,12 @@ private static IndexVersion findCurrent() { } } - public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; - - private static final NavigableMap VERSION_IDS = getAllVersionIds(IndexVersion.class); - - static final IndexVersion LATEST_DEFINED; - static { - LATEST_DEFINED = VERSION_IDS.lastEntry().getValue(); - - // see comment on IDS field - // now we're registered the index versions, we can clear the map - IDS = null; - } - - static NavigableMap getAllVersionIds(Class cls) { - Map versionIdFields = new HashMap<>(); - NavigableMap builder = new TreeMap<>(); - - Set ignore = Set.of("ZERO", "CURRENT", "MINIMUM_COMPATIBLE"); - - for (Field declaredField : cls.getFields()) { - if (declaredField.getType().equals(IndexVersion.class)) { - String fieldName = declaredField.getName(); - if (ignore.contains(fieldName)) { - continue; - } - - IndexVersion version; - try { - version = (IndexVersion) declaredField.get(null); - } catch (IllegalAccessException e) { - throw new AssertionError(e); - } - builder.put(version.id, version); - - if (Assertions.ENABLED) { - // check the version number is unique - var sameVersionNumber = versionIdFields.put(version.id, fieldName); - assert sameVersionNumber == null - : "Versions [" - + sameVersionNumber - + "] and [" - + fieldName - + "] have the same version number [" - + version.id - + "]. Each IndexVersion should have a different version number"; - } - } - } - - return Collections.unmodifiableNavigableMap(builder); - } - - static Collection getAllVersions() { - return VERSION_IDS.values(); - } - public static IndexVersion readVersion(StreamInput in) throws IOException { return fromId(in.readVInt()); } public static IndexVersion fromId(int id) { - IndexVersion known = VERSION_IDS.get(id); + IndexVersion known = IndexVersions.VERSION_IDS.get(id); if (known != null) { return known; } @@ -246,10 +90,10 @@ public static IndexVersion fromId(int id) { // Our best guess is to use the same lucene version as the previous // version in the list, assuming that it didn't change. // if it's older than any known version use the previous major to the oldest known lucene version - var prev = VERSION_IDS.floorEntry(id); + var prev = IndexVersions.VERSION_IDS.floorEntry(id); Version luceneVersion = prev != null ? 
prev.getValue().luceneVersion - : Version.fromBits(VERSION_IDS.firstEntry().getValue().luceneVersion.major - 1, 0, 0); + : Version.fromBits(IndexVersions.VERSION_IDS.firstEntry().getValue().luceneVersion.major - 1, 0, 0); return new IndexVersion(id, luceneVersion); } @@ -281,14 +125,14 @@ public static IndexVersion current() { } public boolean isLegacyIndexVersion() { - return before(MINIMUM_COMPATIBLE); + return before(IndexVersions.MINIMUM_COMPATIBLE); } public static IndexVersion getMinimumCompatibleIndexVersion(int versionId) { int major = versionId / 1_000_000; if (major == IndexVersion.current().id() / 1_000_000) { // same compatibility version as current - return IndexVersion.MINIMUM_COMPATIBLE; + return IndexVersions.MINIMUM_COMPATIBLE; } else { int compatId = (major-1) * 1_000_000; if (major <= 8) compatId += 99; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java new file mode 100644 index 0000000000000..6327c2ba53f54 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index; + +import org.apache.lucene.util.Version; +import org.elasticsearch.core.Assertions; + +import java.lang.reflect.Field; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Set; +import java.util.TreeMap; +import java.util.TreeSet; + +@SuppressWarnings("deprecation") +public class IndexVersions { + + /* + * NOTE: IntelliJ lies! + * This map is used during class construction, referenced by the registerIndexVersion method. + * When all the index version constants have been registered, the map is cleared & never touched again. + */ + @SuppressWarnings("UnusedAssignment") + static TreeSet<Integer> IDS = new TreeSet<>(); + + private static IndexVersion def(int id, Version luceneVersion) { + if (IDS == null) throw new IllegalStateException("The IDS map needs to be present to call this method"); + + if (IDS.add(id) == false) { + throw new IllegalArgumentException("Version id " + id + " defined twice"); + } + if (id < IDS.last()) { + throw new IllegalArgumentException("Version id " + id + " is not defined in the right location.
Keep constants sorted"); + } + return new IndexVersion(id, luceneVersion); + } + + public static final IndexVersion ZERO = def(0, Version.LATEST); + public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); + + public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); + public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); + public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); + public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); + public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); + public static final IndexVersion V_8_3_0 = def(8_03_00_99, Version.LUCENE_9_2_0); + public static final IndexVersion V_8_4_0 = def(8_04_00_99, Version.LUCENE_9_3_0); + public static final IndexVersion V_8_5_0 = def(8_05_00_99, Version.LUCENE_9_4_1); + public static final IndexVersion V_8_6_0 = def(8_06_00_99, Version.LUCENE_9_4_2); + public static final IndexVersion V_8_7_0 = def(8_07_00_99, Version.LUCENE_9_5_0); + public static final IndexVersion V_8_8_0 = def(8_08_00_99, Version.LUCENE_9_6_0); + public static final IndexVersion V_8_8_2 = def(8_08_02_99, Version.LUCENE_9_6_0); + public static final IndexVersion V_8_9_0 = def(8_09_00_99, Version.LUCENE_9_7_0); + public static final IndexVersion V_8_9_1 = def(8_09_01_99, Version.LUCENE_9_7_0); + public static final IndexVersion V_8_10_0 = def(8_10_00_99, Version.LUCENE_9_7_0); + /* + * READ THE COMMENT BELOW THIS BLOCK OF DECLARATIONS BEFORE ADDING NEW INDEX VERSIONS + * Detached index versions added below here. + */ + public static final IndexVersion FIRST_DETACHED_INDEX_VERSION = def(8_500_000, Version.LUCENE_9_7_0); + public static final IndexVersion NEW_SPARSE_VECTOR = def(8_500_001, Version.LUCENE_9_7_0); + public static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT = def(8_500_002, Version.LUCENE_9_7_0); + public static final IndexVersion UPGRADE_LUCENE_9_8 = def(8_500_003, Version.LUCENE_9_8_0); + + /* + * STOP! READ THIS FIRST! 
No, really, + * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ + * / ___|_ _/ _ \| _ \| | | _ \| ____| / \ | _ \ |_ _| | | |_ _/ ___| | ___|_ _| _ \/ ___|_ _| | + * \___ \ | || | | | |_) | | | |_) | _| / _ \ | | | | | | | |_| || |\___ \ | |_ | || |_) \___ \ | | | | + * ___) || || |_| | __/|_| | _ <| |___ / ___ \| |_| | | | | _ || | ___) | | _| | || _ < ___) || | |_| + * |____/ |_| \___/|_| (_) |_| \_\_____/_/ \_\____/ |_| |_| |_|___|____/ |_| |___|_| \_\____/ |_| (_) + * + * A new index version should be added EVERY TIME a change is made to index metadata or data storage. + * Each index version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, ≤V_8_11_0). + * + * To add a new index version, add a new constant at the bottom of the list, above this comment, which is one greater than the + * current highest version id. Use a descriptive constant name. Don't add other lines, comments, etc. + * + * REVERTING AN INDEX VERSION + * + * If you revert a commit with an index version change, you MUST ensure there is a NEW index version representing the reverted + * change. DO NOT let the index version go backwards, it must ALWAYS be incremented. + * + * DETERMINING TRANSPORT VERSIONS FROM GIT HISTORY + * + * TODO after the release of v8.11.0, copy the instructions about using git to track the history of versions from TransportVersion.java + * (the example commands won't make sense until at least 8.11.0 is released) + */ + + public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; + + static final NavigableMap<Integer, IndexVersion> VERSION_IDS = getAllVersionIds(IndexVersions.class); + static final IndexVersion LATEST_DEFINED; + static { + LATEST_DEFINED = VERSION_IDS.lastEntry().getValue(); + + // see comment on IDS field + // now we've registered the index versions, we can clear the map + IDS = null; + } + + static NavigableMap<Integer, IndexVersion> getAllVersionIds(Class<?> cls) { + Map<Integer, String> versionIdFields = new HashMap<>(); + NavigableMap<Integer, IndexVersion> builder = new TreeMap<>(); + + Set<String> ignore = Set.of("ZERO", "MINIMUM_COMPATIBLE"); + + for (Field declaredField : cls.getFields()) { + if (declaredField.getType().equals(IndexVersion.class)) { + String fieldName = declaredField.getName(); + if (ignore.contains(fieldName)) { + continue; + } + + IndexVersion version; + try { + version = (IndexVersion) declaredField.get(null); + } catch (IllegalAccessException e) { + throw new AssertionError(e); + } + builder.put(version.id(), version); + + if (Assertions.ENABLED) { + // check the version number is unique + var sameVersionNumber = versionIdFields.put(version.id(), fieldName); + assert sameVersionNumber == null + : "Versions [" + + sameVersionNumber + + "] and [" + + fieldName + + "] have the same version number [" + + version.id() + + "].
Each IndexVersion should have a different version number"; + } + } + } + + return Collections.unmodifiableNavigableMap(builder); + } + + static Collection<IndexVersion> getAllVersions() { + return VERSION_IDS.values(); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java index 256fc75733cfb..174253252416c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/ShingleTokenFilterFactory.java @@ -15,7 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.analysis.miscellaneous.DisableGraphAttribute; public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory { @@ -36,7 +36,7 @@ public ShingleTokenFilterFactory(IndexSettings indexSettings, Environment enviro int shingleDiff = maxShingleSize - minShingleSize + (outputUnigrams ? 1 : 0); if (shingleDiff > maxAllowedShingleDiff) { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_0_0)) { throw new IllegalArgumentException( "In Shingle TokenFilter the difference between max_shingle_size and min_shingle_size (and +1 if outputting unigrams)" + " must be less than or equal to: [" @@ -81,7 +81,7 @@ public TokenStream create(TokenStream tokenStream) { @Override public TokenFilterFactory getSynonymFilter() { - if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_0_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_0_0)) { throw new IllegalArgumentException("Token filter [" + name() + "] cannot be used to parse synonyms"); } else { DEPRECATION_LOGGER.warn( diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index 7f896c352d958..a496429cc3e2b 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -111,6 +111,7 @@ public abstract class Engine implements Closeable { public static final String SEARCH_SOURCE = "search"; // TODO: Make source of search enum? public static final String CAN_MATCH_SEARCH_SOURCE = "can_match"; protected static final String DOC_STATS_SOURCE = "doc_stats"; + public static final long UNKNOWN_PRIMARY_TERM = -1L; protected final ShardId shardId; protected final Logger logger; @@ -2114,8 +2115,19 @@ public final EngineConfig getEngineConfig() { /** * Allows registering a listener for when the index shard is on a segment generation >= minGeneration. + * + * @deprecated use {@link #addPrimaryTermAndGenerationListener(long, long, ActionListener)} instead. + */ + @Deprecated public void addSegmentGenerationListener(long minGeneration, ActionListener<Long> listener) { + addPrimaryTermAndGenerationListener(UNKNOWN_PRIMARY_TERM, minGeneration, listener); + } + + /** + * Allows registering a listener for when the index shard is on a primary term >= minPrimaryTerm + * and a segment generation >= minGeneration.
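[Editor's aside: a usage sketch for the listener registration described above; the caller below is hypothetical and not part of the patch. The deprecated single-coordinate variant forwards UNKNOWN_PRIMARY_TERM so that only the generation is checked.]

// Hypothetical caller of the new Engine listener API.
void waitForRefresh(Engine engine, long primaryTerm, long generation, ActionListener<Long> listener) {
    // completes once the shard's reader reflects at least (primaryTerm, generation)
    engine.addPrimaryTermAndGenerationListener(primaryTerm, generation, listener);
}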
+ */ + public void addPrimaryTermAndGenerationListener(long minPrimaryTerm, long minGeneration, ActionListener<Long> listener) { throw new UnsupportedOperationException(); } @@ -2129,13 +2141,13 @@ public void addFlushListener(Translog.Location location, ActionListener li * refreshed is true if a refresh happened. If refreshed, generation * contains the generation of the index commit that the reader has opened upon refresh. */ - public record RefreshResult(boolean refreshed, long generation) { + public record RefreshResult(boolean refreshed, long primaryTerm, long generation) { public static final long UNKNOWN_GENERATION = -1L; public static final RefreshResult NO_REFRESH = new RefreshResult(false); public RefreshResult(boolean refreshed) { - this(refreshed, UNKNOWN_GENERATION); + this(refreshed, UNKNOWN_PRIMARY_TERM, UNKNOWN_GENERATION); } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index e9651a7f63867..9419cd7e6ab5f 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -2075,7 +2075,8 @@ protected final RefreshResult refresh(String source, SearcherScope scope, boolea // for a long time: maybePruneDeletes(); mergeScheduler.refreshConfig(); - return new RefreshResult(refreshed, segmentGeneration); + long primaryTerm = config().getPrimaryTermSupplier().getAsLong(); + return new RefreshResult(refreshed, primaryTerm, segmentGeneration); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java index cae59baf1dfbd..aa9bddf414296 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/ReadOnlyEngine.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParser; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.seqno.SeqNoStats; @@ -176,7 +177,7 @@ protected void ensureMaxSeqNoEqualsToGlobalCheckpoint(final SeqNoStats seqNoStat // created after the refactoring of the Close Index API and its TransportVerifyShardBeforeCloseAction // that guarantee that all operations have been flushed to Lucene.
IndexVersion indexVersionCreated = engineConfig.getIndexSettings().getIndexVersionCreated(); - if (indexVersionCreated.onOrAfter(IndexVersion.V_7_2_0) + if (indexVersionCreated.onOrAfter(IndexVersions.V_7_2_0) || (seqNoStats.getGlobalCheckpoint() != SequenceNumbers.UNASSIGNED_SEQ_NO)) { assert assertMaxSeqNoEqualsToGlobalCheckpoint(seqNoStats.getMaxSeqNo(), seqNoStats.getGlobalCheckpoint()); if (seqNoStats.getMaxSeqNo() != seqNoStats.getGlobalCheckpoint()) { diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java index e4030a0de9d61..27ef30ed67508 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/IndexFieldDataService.java @@ -91,8 +91,12 @@ public synchronized void clearField(final String fieldName) { */ @SuppressWarnings("unchecked") public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType, FieldDataContext fieldDataContext) { + return getFromBuilder(fieldType, fieldType.fielddataBuilder(fieldDataContext)); + } + + @SuppressWarnings("unchecked") + public <IFD extends IndexFieldData<?>> IFD getFromBuilder(MappedFieldType fieldType, IndexFieldData.Builder builder) { final String fieldName = fieldType.name(); - IndexFieldData.Builder builder = fieldType.fielddataBuilder(fieldDataContext); IndexFieldDataCache cache; synchronized (this) { cache = fieldDataCaches.get(fieldName); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java new file mode 100644 index 0000000000000..90a295e5a25f2 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockDocValuesReader.java @@ -0,0 +1,866 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.UnicodeUtil; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import org.elasticsearch.index.mapper.BlockLoader.BooleanBuilder; +import org.elasticsearch.index.mapper.BlockLoader.Builder; +import org.elasticsearch.index.mapper.BlockLoader.BuilderFactory; +import org.elasticsearch.index.mapper.BlockLoader.BytesRefBuilder; +import org.elasticsearch.index.mapper.BlockLoader.Docs; +import org.elasticsearch.index.mapper.BlockLoader.DoubleBuilder; +import org.elasticsearch.index.mapper.BlockLoader.IntBuilder; +import org.elasticsearch.index.mapper.BlockLoader.LongBuilder; + +import java.io.IOException; + +/** + * A reader that supports reading doc-values from a Lucene segment in Block fashion.
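[Editor's aside: the class below is easier to follow with the call pattern in mind. A rough sketch, assuming the factory and doc list are supplied by the calling compute engine and that "my_field" is a placeholder field name; none of this is code from the patch.]

// Hypothetical usage of the loaders defined below.
BlockLoader.Block loadLongsBlock(LeafReaderContext context, BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) throws IOException {
    BlockLoader loader = BlockDocValuesReader.longs("my_field"); // assumed field name
    BlockDocValuesReader reader = loader.reader(context);        // one reader per segment
    return reader.readValues(factory, docs);                     // docs must be in ascending order
}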
+ */ +public abstract class BlockDocValuesReader { + public interface Factory { + BlockDocValuesReader build(int segment) throws IOException; + + boolean supportsOrdinals(); + + SortedSetDocValues ordinals(int segment) throws IOException; + } + + protected final Thread creationThread; + + public BlockDocValuesReader() { + this.creationThread = Thread.currentThread(); + } + + /** + * Returns the current doc that this reader is on. + */ + public abstract int docID(); + + /** + * The {@link BlockLoader.Builder} for data of this type. + */ + public abstract Builder builder(BuilderFactory factory, int expectedCount); + + /** + * Reads the values of the given documents specified in the input block + */ + public abstract BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException; + + /** + * Reads the values of the given document into the builder + */ + public abstract void readValuesFromSingleDoc(int docId, Builder builder) throws IOException; + + /** + * Checks if the reader can be used to read a range of documents starting with the given docID by the current thread. + */ + public static boolean canReuse(BlockDocValuesReader reader, int startingDocID) { + return reader != null && reader.creationThread == Thread.currentThread() && reader.docID() <= startingDocID; + } + + public static BlockLoader booleans(String fieldName) { + return context -> { + SortedNumericDocValues docValues = DocValues.getSortedNumeric(context.reader(), fieldName); + NumericDocValues singleton = DocValues.unwrapSingleton(docValues); + if (singleton != null) { + return new SingletonBooleans(singleton); + } + return new Booleans(docValues); + }; + } + + public static BlockLoader bytesRefsFromOrds(String fieldName) { + return new BlockLoader() { + @Override + public BlockDocValuesReader reader(LeafReaderContext context) throws IOException { + SortedSetDocValues docValues = ordinals(context); + SortedDocValues singleton = DocValues.unwrapSingleton(docValues); + if (singleton != null) { + return new SingletonOrdinals(singleton); + } + return new Ordinals(docValues); + } + + @Override + public boolean supportsOrdinals() { + return true; + } + + @Override + public SortedSetDocValues ordinals(LeafReaderContext context) throws IOException { + return DocValues.getSortedSet(context.reader(), fieldName); + } + }; + } + + /** + * Load {@link BytesRef} values from doc values. Prefer {@link #bytesRefsFromOrds} if + * doc values are indexed with ordinals because that's generally much faster. It's + * possible to use this with field data, but generally should be avoided because field + * data has higher per invocation overhead. + */ + public static BlockLoader bytesRefsFromDocValues(CheckedFunction<LeafReaderContext, SortedBinaryDocValues, IOException> fieldData) { + return context -> new Bytes(fieldData.apply(context)); + } + + /** + * Convert from the stored {@code long} into the {@code double} to load. + * Sadly, this will go megamorphic pretty quickly and slow us down, + * but it gets the job done for now. + */ + public interface ToDouble { + double convert(long v); + } + + /** + * Load {@code double} values from doc values.
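[Editor's aside: a sketch of how the ToDouble hook above is meant to be used; the field name and scaling factor are illustrative assumptions, not code from this change.]

// Hypothetical: a field that stores value * 100 as a long decodes each
// stored long back into a double while the block is loaded.
BlockLoader scaledPriceLoader() {
    return BlockDocValuesReader.doubles("price", v -> v / 100.0); // "price" is an assumed field
}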
+ */ + public static BlockLoader doubles(String fieldName, ToDouble toDouble) { + return context -> { + SortedNumericDocValues docValues = DocValues.getSortedNumeric(context.reader(), fieldName); + NumericDocValues singleton = DocValues.unwrapSingleton(docValues); + if (singleton != null) { + return new SingletonDoubles(singleton, toDouble); + } + return new Doubles(docValues, toDouble); + }; + } + + /** + * Load {@code int} values from doc values. + */ + public static BlockLoader ints(String fieldName) { + return context -> { + SortedNumericDocValues docValues = DocValues.getSortedNumeric(context.reader(), fieldName); + NumericDocValues singleton = DocValues.unwrapSingleton(docValues); + if (singleton != null) { + return new SingletonInts(singleton); + } + return new Ints(docValues); + }; + } + + /** + * Load a block of {@code long}s from doc values. + */ + public static BlockLoader longs(String fieldName) { + return context -> { + SortedNumericDocValues docValues = DocValues.getSortedNumeric(context.reader(), fieldName); + NumericDocValues singleton = DocValues.unwrapSingleton(docValues); + if (singleton != null) { + return new SingletonLongs(singleton); + } + return new Longs(docValues); + }; + } + + /** + * Load blocks with only null. + */ + public static BlockLoader nulls() { + return context -> new Nulls(); + } + + @Override + public abstract String toString(); + + private static class SingletonLongs extends BlockDocValuesReader { + private final NumericDocValues numericDocValues; + + SingletonLongs(NumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public BlockLoader.LongBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.longsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.LongBuilder builder = builder(factory, docs.count())) { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (numericDocValues.advanceExact(doc)) { + builder.appendLong(numericDocValues.longValue()); + } else { + builder.appendNull(); + } + lastDoc = doc; + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + BlockLoader.LongBuilder blockBuilder = (BlockLoader.LongBuilder) builder; + if (numericDocValues.advanceExact(docId)) { + blockBuilder.appendLong(numericDocValues.longValue()); + } else { + blockBuilder.appendNull(); + } + } + + @Override + public int docID() { + return numericDocValues.docID(); + } + + @Override + public String toString() { + return "SingletonLongs"; + } + } + + private static class Longs extends BlockDocValuesReader { + private final SortedNumericDocValues numericDocValues; + private int docID = -1; + + Longs(SortedNumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public BlockLoader.LongBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.longsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.LongBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < this.docID) { + throw new IllegalStateException("docs within same 
block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + read(docId, (LongBuilder) builder); + } + + private void read(int doc, LongBuilder builder) throws IOException { + this.docID = doc; + if (false == numericDocValues.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = numericDocValues.docValueCount(); + if (count == 1) { + builder.appendLong(numericDocValues.nextValue()); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendLong(numericDocValues.nextValue()); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + // There is a .docID on the numericDocValues but it is often not implemented. + return docID; + } + + @Override + public String toString() { + return "Longs"; + } + } + + private static class SingletonInts extends BlockDocValuesReader { + private final NumericDocValues numericDocValues; + + SingletonInts(NumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public IntBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.intsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.IntBuilder builder = builder(factory, docs.count())) { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (numericDocValues.advanceExact(doc)) { + builder.appendInt(Math.toIntExact(numericDocValues.longValue())); + } else { + builder.appendNull(); + } + lastDoc = doc; + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + IntBuilder blockBuilder = (IntBuilder) builder; + if (numericDocValues.advanceExact(docId)) { + blockBuilder.appendInt(Math.toIntExact(numericDocValues.longValue())); + } else { + blockBuilder.appendNull(); + } + } + + @Override + public int docID() { + return numericDocValues.docID(); + } + + @Override + public String toString() { + return "SingletonInts"; + } + } + + private static class Ints extends BlockDocValuesReader { + private final SortedNumericDocValues numericDocValues; + private int docID = -1; + + Ints(SortedNumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public IntBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.intsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.IntBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < this.docID) { + throw new IllegalStateException("docs within same block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + read(docId, (IntBuilder) builder); + } + + private void read(int doc, IntBuilder builder) throws IOException { + this.docID = doc; + if (false == numericDocValues.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = numericDocValues.docValueCount(); + if (count == 1) { + 
builder.appendInt(Math.toIntExact(numericDocValues.nextValue())); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendInt(Math.toIntExact(numericDocValues.nextValue())); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + // There is a .docID on the numericDocValues but it is often not implemented. + return docID; + } + + @Override + public String toString() { + return "Ints"; + } + } + + private static class SingletonDoubles extends BlockDocValuesReader { + private final NumericDocValues docValues; + private final ToDouble toDouble; + private int docID = -1; + + SingletonDoubles(NumericDocValues docValues, ToDouble toDouble) { + this.docValues = docValues; + this.toDouble = toDouble; + } + + @Override + public DoubleBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.doublesFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.DoubleBuilder builder = builder(factory, docs.count())) { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (docValues.advanceExact(doc)) { + builder.appendDouble(toDouble.convert(docValues.longValue())); + } else { + builder.appendNull(); + } + lastDoc = doc; + this.docID = doc; + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + this.docID = docId; + DoubleBuilder blockBuilder = (DoubleBuilder) builder; + if (docValues.advanceExact(this.docID)) { + blockBuilder.appendDouble(toDouble.convert(docValues.longValue())); + } else { + blockBuilder.appendNull(); + } + } + + @Override + public int docID() { + return docID; + } + + @Override + public String toString() { + return "SingletonDoubles"; + } + } + + private static class Doubles extends BlockDocValuesReader { + private final SortedNumericDocValues docValues; + private final ToDouble toDouble; + private int docID = -1; + + Doubles(SortedNumericDocValues docValues, ToDouble toDouble) { + this.docValues = docValues; + this.toDouble = toDouble; + } + + @Override + public DoubleBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.doublesFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.DoubleBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < this.docID) { + throw new IllegalStateException("docs within same block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + read(docId, (DoubleBuilder) builder); + } + + private void read(int doc, DoubleBuilder builder) throws IOException { + this.docID = doc; + if (false == docValues.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = docValues.docValueCount(); + if (count == 1) { + builder.appendDouble(toDouble.convert(docValues.nextValue())); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendDouble(toDouble.convert(docValues.nextValue())); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + return docID; +
} + + @Override + public String toString() { + return "Doubles"; + } + } + + private static class SingletonOrdinals extends BlockDocValuesReader { + private final SortedDocValues ordinals; + + SingletonOrdinals(SortedDocValues ordinals) { + this.ordinals = ordinals; + } + + @Override + public BytesRefBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.bytesRefsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.SingletonOrdinalsBuilder builder = factory.singletonOrdinalsBuilder(ordinals, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < ordinals.docID()) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (ordinals.advanceExact(doc)) { + builder.appendOrd(ordinals.ordValue()); + } else { + builder.appendNull(); + } + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int doc, Builder builder) throws IOException { + if (ordinals.advanceExact(doc)) { + ((BytesRefBuilder) builder).appendBytesRef(ordinals.lookupOrd(ordinals.ordValue())); + } else { + builder.appendNull(); + } + } + + @Override + public int docID() { + return ordinals.docID(); + } + + @Override + public String toString() { + return "SingletonOrdinals"; + } + } + + private static class Ordinals extends BlockDocValuesReader { + private final SortedSetDocValues ordinals; + + Ordinals(SortedSetDocValues ordinals) { + this.ordinals = ordinals; + } + + @Override + public BytesRefBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.bytesRefsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < ordinals.docID()) { + throw new IllegalStateException("docs within same block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int doc, Builder builder) throws IOException { + read(doc, (BytesRefBuilder) builder); + } + + private void read(int doc, BytesRefBuilder builder) throws IOException { + if (false == ordinals.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = ordinals.docValueCount(); + if (count == 1) { + builder.appendBytesRef(ordinals.lookupOrd(ordinals.nextOrd())); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendBytesRef(ordinals.lookupOrd(ordinals.nextOrd())); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + return ordinals.docID(); + } + + @Override + public String toString() { + return "Ordinals"; + } + } + + private static class Bytes extends BlockDocValuesReader { + private final SortedBinaryDocValues docValues; + private int docID = -1; + + Bytes(SortedBinaryDocValues docValues) { + this.docValues = docValues; + } + + @Override + public BytesRefBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.bytesRefsFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < docID) { + 
throw new IllegalStateException("docs within same block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + read(docId, (BytesRefBuilder) builder); + } + + private void read(int doc, BytesRefBuilder builder) throws IOException { + this.docID = doc; + if (false == docValues.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = docValues.docValueCount(); + if (count == 1) { + // TODO read ords in ascending order. Buffers and stuff. + builder.appendBytesRef(docValues.nextValue()); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendBytesRef(docValues.nextValue()); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + return docID; + } + + @Override + public String toString() { + return "Bytes"; + } + } + + private static class SingletonBooleans extends BlockDocValuesReader { + private final NumericDocValues numericDocValues; + + SingletonBooleans(NumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public BooleanBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.booleansFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.BooleanBuilder builder = builder(factory, docs.count())) { + int lastDoc = -1; + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < lastDoc) { + throw new IllegalStateException("docs within same block must be in order"); + } + if (numericDocValues.advanceExact(doc)) { + builder.appendBoolean(numericDocValues.longValue() != 0); + } else { + builder.appendNull(); + } + lastDoc = doc; + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + BooleanBuilder blockBuilder = (BooleanBuilder) builder; + if (numericDocValues.advanceExact(docId)) { + blockBuilder.appendBoolean(numericDocValues.longValue() != 0); + } else { + blockBuilder.appendNull(); + } + } + + @Override + public int docID() { + return numericDocValues.docID(); + } + + @Override + public String toString() { + return "SingletonBooleans"; + } + } + + private static class Booleans extends BlockDocValuesReader { + private final SortedNumericDocValues numericDocValues; + private int docID = -1; + + Booleans(SortedNumericDocValues numericDocValues) { + this.numericDocValues = numericDocValues; + } + + @Override + public BooleanBuilder builder(BuilderFactory factory, int expectedCount) { + return factory.booleansFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.BooleanBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < this.docID) { + throw new IllegalStateException("docs within same block must be in order"); + } + read(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) throws IOException { + read(docId, (BooleanBuilder) builder); + } + + private void read(int doc, BooleanBuilder builder) throws IOException { + this.docID = doc; + if (false == numericDocValues.advanceExact(doc)) { + builder.appendNull(); + return; + } + int count = 
numericDocValues.docValueCount(); + if (count == 1) { + builder.appendBoolean(numericDocValues.nextValue() != 0); + return; + } + builder.beginPositionEntry(); + for (int v = 0; v < count; v++) { + builder.appendBoolean(numericDocValues.nextValue() != 0); + } + builder.endPositionEntry(); + } + + @Override + public int docID() { + // There is a .docID on the numericDocValues but it is often not implemented. + return docID; + } + + @Override + public String toString() { + return "Booleans"; + } + } + + private static class Nulls extends BlockDocValuesReader { + private int docID = -1; + + @Override + public BlockLoader.Builder builder(BuilderFactory factory, int expectedCount) { + return factory.nulls(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BuilderFactory factory, Docs docs) throws IOException { + try (BlockLoader.Builder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + builder.appendNull(); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, Builder builder) { + this.docID = docId; + builder.appendNull(); + } + + @Override + public int docID() { + return docID; + } + + @Override + public String toString() { + return "Nulls"; + } + } + + /** + * Convert a {@link String} into a utf-8 {@link BytesRef}. + */ + protected static BytesRef toBytesRef(BytesRef scratch, String v) { + int len = UnicodeUtil.maxUTF8Length(v.length()); + if (scratch.bytes.length < len) { + scratch.bytes = new byte[len]; + } + scratch.length = UnicodeUtil.UTF16toUTF8(v, 0, v.length(), scratch.bytes); + return scratch; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java new file mode 100644 index 0000000000000..af53ab42d35d9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockLoader.java @@ -0,0 +1,203 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.core.Releasable; + +import java.io.IOException; + +/** + * Interface for loading data in a block shape. Instances of this class + * must be immutable and thread safe. + */ +public interface BlockLoader { + /** + * Build a {@link LeafReaderContext leaf} level reader. + */ + BlockDocValuesReader reader(LeafReaderContext context) throws IOException; + + /** + * Does this loader support loading bytes via calling {@link #ordinals}. + */ + default boolean supportsOrdinals() { + return false; + } + + /** + * Load ordinals for the provided context. + */ + default SortedSetDocValues ordinals(LeafReaderContext context) throws IOException { + throw new IllegalStateException("ordinals not supported"); + } + + /** + * A list of documents to load. + */ + interface Docs { + int count(); + + int get(int i); + } + + /** + * Builds block "builders" for loading data into blocks for the compute engine. 
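For orientation: Docs above is nothing more than a count plus positional access to doc IDs, and every reader in this file enforces that those IDs arrive in ascending order within a block. A minimal sketch of an array-backed Docs together with the ordering check (DocsContractSketch and its helper are invented for the example, not part of this change):

    import java.util.Arrays;

    public class DocsContractSketch {
        /** Mirrors the shape of BlockLoader.Docs: a count plus positional access. */
        interface Docs {
            int count();

            int get(int i);
        }

        /** Array-backed Docs; sorts up front because the readers require ascending ids. */
        static Docs docs(int... ids) {
            int[] sorted = ids.clone();
            Arrays.sort(sorted);
            return new Docs() {
                public int count() {
                    return sorted.length;
                }

                public int get(int i) {
                    return sorted[i];
                }
            };
        }

        public static void main(String[] args) {
            Docs docs = docs(7, 3, 12);
            int lastDoc = -1;
            for (int i = 0; i < docs.count(); i++) {
                int doc = docs.get(i);
                if (doc < lastDoc) { // the same check the readers perform
                    throw new IllegalStateException("docs within same block must be in order");
                }
                lastDoc = doc;
                System.out.println("load doc " + doc);
            }
        }
    }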
+ * It's important for performance that this only have one implementation in + * production code. That implementation sits in the "compute" project. There is + * also a test implementation, but there may be no other implementations. + */ + interface BuilderFactory { + /** + * Build a builder to load booleans as loaded from doc values. Doc values + * load booleans deduplicated and in sorted order. + */ + BooleanBuilder booleansFromDocValues(int expectedCount); + + /** + * Build a builder to load booleans without any loading constraints. + */ + BooleanBuilder booleans(int expectedCount); + + /** + * Build a builder to load {@link BytesRef}s as loaded from doc values. + * Doc values load {@linkplain BytesRef}s deduplicated and in sorted order. + */ + BytesRefBuilder bytesRefsFromDocValues(int expectedCount); + + /** + * Build a builder to load {@link BytesRef}s without any loading constraints. + */ + BytesRefBuilder bytesRefs(int expectedCount); + + /** + * Build a builder to load doubles as loaded from doc values. + * Doc values load doubles deduplicated and in sorted order. + */ + DoubleBuilder doublesFromDocValues(int expectedCount); + + /** + * Build a builder to load doubles without any loading constraints. + */ + DoubleBuilder doubles(int expectedCount); + + /** + * Build a builder to load ints as loaded from doc values. + * Doc values load ints deduplicated and in sorted order. + */ + IntBuilder intsFromDocValues(int expectedCount); + + /** + * Build a builder to load ints without any loading constraints. + */ + IntBuilder ints(int expectedCount); + + /** + * Build a builder to load longs as loaded from doc values. + * Doc values load longs deduplicated and in sorted order. + */ + LongBuilder longsFromDocValues(int expectedCount); + + /** + * Build a builder to load longs without any loading constraints. + */ + LongBuilder longs(int expectedCount); + + /** + * Build a builder that can only load null values. + * TODO this should return a block directly instead of a builder + */ + Builder nulls(int expectedCount); + + /** + * Build a reader for reading keyword ordinals. + */ + SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count); + + // TODO support non-singleton ords + } + + /** + * Marker interface for block results. The compute engine has a fleshed + * out implementation. + */ + interface Block {} + + /** + * A builder for typed values. For each document you may either call + * {@link #appendNull}, {@code append}, or + * {@link #beginPositionEntry} followed by two or more {@code append} + * calls, and then {@link #endPositionEntry}. + */ + interface Builder extends Releasable { + /** + * Build the actual block. + */ + Block build(); + + /** + * Insert a null value. + */ + Builder appendNull(); + + /** + * Start a multivalued field. + */ + Builder beginPositionEntry(); + + /** + * End a multivalued field. + */ + Builder endPositionEntry(); + } + + interface BooleanBuilder extends Builder { + /** + * Appends a boolean to the current entry. + */ + BooleanBuilder appendBoolean(boolean value); + } + + interface BytesRefBuilder extends Builder { + /** + * Appends a BytesRef to the current entry. + */ + BytesRefBuilder appendBytesRef(BytesRef value); + } + + interface DoubleBuilder extends Builder { + /** + * Appends a double to the current entry. + */ + DoubleBuilder appendDouble(double value); + } + + interface IntBuilder extends Builder { + /** + * Appends an int to the current entry.
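The Builder javadoc above defines the whole append protocol: per position, exactly one of appendNull(), a single append, or beginPositionEntry() followed by two or more appends and endPositionEntry(). A toy recording builder to make the legal call sequences concrete (RecordingIntBuilder is invented for this example; the real builders live in the compute engine):

    import java.util.ArrayList;
    import java.util.List;

    public class BuilderProtocolSketch {
        /** Stand-in for BlockLoader.IntBuilder that just records the call sequence. */
        static class RecordingIntBuilder {
            final List<String> events = new ArrayList<>();

            RecordingIntBuilder appendNull() { events.add("null"); return this; }
            RecordingIntBuilder appendInt(int v) { events.add("int:" + v); return this; }
            RecordingIntBuilder beginPositionEntry() { events.add("begin"); return this; }
            RecordingIntBuilder endPositionEntry() { events.add("end"); return this; }
        }

        public static void main(String[] args) {
            RecordingIntBuilder builder = new RecordingIntBuilder();
            // doc 0: missing value
            builder.appendNull();
            // doc 1: single value - no position entry needed
            builder.appendInt(42);
            // doc 2: multivalued - wrapped in begin/endPositionEntry
            builder.beginPositionEntry();
            builder.appendInt(1);
            builder.appendInt(2);
            builder.endPositionEntry();
            System.out.println(builder.events); // [null, int:42, begin, int:1, int:2, end]
        }
    }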
+ */ + IntBuilder appendInt(int value); + } + + interface LongBuilder extends Builder { + /** + * Appends a long to the current entry. + */ + LongBuilder appendLong(long value); + } + + interface SingletonOrdinalsBuilder extends Builder { + /** + * Appends an ordinal to the builder. + */ + SingletonOrdinalsBuilder appendOrd(int value); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java new file mode 100644 index 0000000000000..1261a3612d3cb --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockSourceReader.java @@ -0,0 +1,199 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; +import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; +import org.elasticsearch.search.lookup.Source; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * Loads values from {@code _source}. This whole process is very slow and cast-tastic, + * so it doesn't really try to avoid megamorphic invocations. It's just going to be + * slow. + * + * Note that this extends {@link BlockDocValuesReader} because it pretends to load + * doc values; for now, ESQL only knows how to load things in a doc values + * order. + */ +public abstract class BlockSourceReader extends BlockDocValuesReader { + /** + * Read {@code boolean}s from {@code _source}. + */ + public static BlockLoader booleans(ValueFetcher fetcher) { + StoredFieldLoader loader = StoredFieldLoader.create(true, Set.of()); + return context -> new BlockSourceReader(fetcher, loader.getLoader(context, null)) { + @Override + public BlockLoader.Builder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.booleans(expectedCount); + } + + @Override + protected void append(BlockLoader.Builder builder, Object v) { + ((BlockLoader.BooleanBuilder) builder).appendBoolean((Boolean) v); + } + + @Override + public String toString() { + return "SourceBooleans"; + } + }; + } + + /** + * Read {@link BytesRef}s from {@code _source}. + */ + public static BlockLoader bytesRefs(ValueFetcher fetcher) { + StoredFieldLoader loader = StoredFieldLoader.create(true, Set.of()); + return context -> new BlockSourceReader(fetcher, loader.getLoader(context, null)) { + BytesRef scratch = new BytesRef(); + + @Override + public BlockLoader.Builder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + @Override + protected void append(BlockLoader.Builder builder, Object v) { + ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, (String) v)); + } + + @Override + public String toString() { + return "SourceBytes"; + } + }; + } + + /** + * Read {@code double}s from {@code _source}.
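The bytesRefs(...) factory above funnels each String through the shared toBytesRef(scratch, v) helper so that one buffer is reused for every value in a block. The same pattern in isolation, using only Lucene utilities (a sketch; assumes lucene-core on the classpath):

    import org.apache.lucene.util.BytesRef;
    import org.apache.lucene.util.UnicodeUtil;

    public class ScratchBytesRefSketch {
        /** Encode UTF-16 Strings into a reused BytesRef, growing the buffer only when needed. */
        static BytesRef toBytesRef(BytesRef scratch, String v) {
            int maxLen = UnicodeUtil.maxUTF8Length(v.length());
            if (scratch.bytes.length < maxLen) {
                scratch.bytes = new byte[maxLen]; // grow once; shorter values reuse it
            }
            scratch.length = UnicodeUtil.UTF16toUTF8(v, 0, v.length(), scratch.bytes);
            return scratch;
        }

        public static void main(String[] args) {
            BytesRef scratch = new BytesRef();
            for (String s : new String[] { "foo", "a much longer value", "bar" }) {
                BytesRef b = toBytesRef(scratch, s);
                // b aliases scratch, so copy it if it must outlive this iteration
                System.out.println(b.utf8ToString() + " (buffer size " + scratch.bytes.length + ")");
            }
        }
    }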
+ */ + public static BlockLoader doubles(ValueFetcher fetcher) { + StoredFieldLoader loader = StoredFieldLoader.create(true, Set.of()); + return context -> new BlockSourceReader(fetcher, loader.getLoader(context, null)) { + @Override + public BlockLoader.Builder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.doubles(expectedCount); + } + + @Override + protected void append(BlockLoader.Builder builder, Object v) { + ((BlockLoader.DoubleBuilder) builder).appendDouble(((Number) v).doubleValue()); + } + + @Override + public String toString() { + return "SourceDoubles"; + } + }; + } + + /** + * Read {@code int}s from {@code _source}. + */ + public static BlockLoader ints(ValueFetcher fetcher) { + StoredFieldLoader loader = StoredFieldLoader.create(true, Set.of()); + return context -> new BlockSourceReader(fetcher, loader.getLoader(context, null)) { + @Override + public BlockLoader.Builder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.ints(expectedCount); + } + + @Override + protected void append(BlockLoader.Builder builder, Object v) { + ((BlockLoader.IntBuilder) builder).appendInt(((Number) v).intValue()); + } + + @Override + public String toString() { + return "SourceInts"; + } + }; + } + + /** + * Read {@code long}s from {@code _source}. + */ + public static BlockLoader longs(ValueFetcher fetcher) { + StoredFieldLoader loader = StoredFieldLoader.create(true, Set.of()); + return context -> new BlockSourceReader(fetcher, loader.getLoader(context, null)) { + @Override + public BlockLoader.Builder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.longs(expectedCount); + } + + @Override + protected void append(BlockLoader.Builder builder, Object v) { + ((BlockLoader.LongBuilder) builder).appendLong(((Number) v).longValue()); + } + + @Override + public String toString() { + return "SourceLongs"; + } + }; + } + + private final ValueFetcher fetcher; + private final LeafStoredFieldLoader loader; + private final List<Object> ignoredValues = new ArrayList<>(); + private int docID = -1; + + BlockSourceReader(ValueFetcher fetcher, LeafStoredFieldLoader loader) { + this.fetcher = fetcher; + this.loader = loader; + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) throws IOException { + try (BlockLoader.Builder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + int doc = docs.get(i); + if (doc < this.docID) { + throw new IllegalStateException("docs within same block must be in order"); + } + readValuesFromSingleDoc(doc, builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int doc, BlockLoader.Builder builder) throws IOException { + this.docID = doc; + loader.advanceTo(doc); + List<Object> values = fetcher.fetchValues(Source.fromBytes(loader.source()), doc, ignoredValues); + ignoredValues.clear(); // TODO do something with these?
+ if (values == null) { + builder.appendNull(); + return; + } + if (values.size() == 1) { + append(builder, values.get(0)); + return; + } + builder.beginPositionEntry(); + for (Object v : values) { + append(builder, v); + } + builder.endPositionEntry(); + } + + protected abstract void append(BlockLoader.Builder builder, Object v); + + @Override + public int docID() { + return docID; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java new file mode 100644 index 0000000000000..5984482fd9441 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BlockStoredFieldsReader.java @@ -0,0 +1,153 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; +import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; +import org.elasticsearch.index.mapper.BlockLoader.BytesRefBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +/** + * Loads values from {@link LeafReader#storedFields}. This whole process is very slow + * and cast-tastic, so it doesn't really try to avoid megamorphic invocations. It's + * just going to be slow. + * + * Note that this extends {@link BlockDocValuesReader} because it pretends to load + * doc values; for now, ESQL only knows how to load things in a doc values + * order.
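Behind the ES wrappers, the stored-fields path boils down to plain Lucene stored-field retrieval plus the same null/single/multivalue dispatch. A self-contained sketch against raw Lucene (assumes Lucene 9.5+, where LeafReader#storedFields() exists; the printed strings merely narrate the builder calls the reader would make):

    import java.io.IOException;
    import java.util.Set;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.StoredField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.IndexableField;
    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.store.ByteBuffersDirectory;

    public class StoredFieldReadSketch {
        public static void main(String[] args) throws IOException {
            try (ByteBuffersDirectory dir = new ByteBuffersDirectory()) {
                try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
                    Document doc = new Document();
                    doc.add(new StoredField("tag", "a"));
                    doc.add(new StoredField("tag", "b")); // multivalued stored field
                    writer.addDocument(doc);
                }
                try (DirectoryReader reader = DirectoryReader.open(dir)) {
                    LeafReader leaf = reader.leaves().get(0).reader();
                    Document stored = leaf.storedFields().document(0, Set.of("tag"));
                    IndexableField[] values = stored.getFields("tag");
                    // null / single / multivalued dispatch, as in the Bytes reader above
                    if (values.length == 0) {
                        System.out.println("appendNull()");
                    } else if (values.length == 1) {
                        System.out.println("append(" + values[0].stringValue() + ")");
                    } else {
                        System.out.print("beginPositionEntry(); ");
                        for (IndexableField v : values) {
                            System.out.print("append(" + v.stringValue() + "); ");
                        }
                        System.out.println("endPositionEntry();");
                    }
                }
            }
        }
    }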
+ */ +public abstract class BlockStoredFieldsReader extends BlockDocValuesReader { + public static BlockLoader bytesRefsFromBytesRefs(String field) { + StoredFieldLoader loader = StoredFieldLoader.create(false, Set.of(field)); + return context -> new Bytes(loader.getLoader(context, null), field) { + @Override + protected BytesRef toBytesRef(Object v) { + return (BytesRef) v; + } + }; + } + + public static BlockLoader bytesRefsFromStrings(String field) { + StoredFieldLoader loader = StoredFieldLoader.create(false, Set.of(field)); + return context -> new Bytes(loader.getLoader(context, null), field) { + private final BytesRef scratch = new BytesRef(); + + @Override + protected BytesRef toBytesRef(Object v) { + return toBytesRef(scratch, (String) v); + } + }; + } + + public static BlockLoader id() { + StoredFieldLoader loader = StoredFieldLoader.create(false, Set.of(IdFieldMapper.NAME)); + return context -> new Id(loader.getLoader(context, null)); + } + + private final LeafStoredFieldLoader loader; + private int docID = -1; + + protected BlockStoredFieldsReader(LeafStoredFieldLoader loader) { + this.loader = loader; + } + + @Override + public final BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) throws IOException { + try (BlockLoader.Builder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + readValuesFromSingleDoc(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public final void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) throws IOException { + if (docId < this.docID) { + throw new IllegalStateException("docs within same block must be in order"); + } + this.docID = docId; + loader.advanceTo(docId); + read(loader, builder); + } + + protected abstract void read(LeafStoredFieldLoader loader, BlockLoader.Builder builder) throws IOException; + + @Override + public final int docID() { + return docID; + } + + private abstract static class Bytes extends BlockStoredFieldsReader { + private final String field; + + Bytes(LeafStoredFieldLoader loader, String field) { + super(loader); + this.field = field; + } + + @Override + public BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + protected abstract BytesRef toBytesRef(Object v); + + @Override + protected void read(LeafStoredFieldLoader loader, BlockLoader.Builder builder) throws IOException { + List<Object> values = loader.storedFields().get(field); + if (values == null) { + builder.appendNull(); + return; + } + if (values.size() == 1) { + ((BytesRefBuilder) builder).appendBytesRef(toBytesRef(values.get(0))); + return; + } + builder.beginPositionEntry(); + for (Object v : values) { + ((BytesRefBuilder) builder).appendBytesRef(toBytesRef(v)); + } + builder.endPositionEntry(); + } + + @Override + public String toString() { + return "BlockStoredFieldsReader.Bytes"; + } + } + + private static class Id extends BlockStoredFieldsReader { + private final BytesRef scratch = new BytesRef(); + + Id(LeafStoredFieldLoader loader) { + super(loader); + } + + @Override + public BlockLoader.BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + @Override + protected void read(LeafStoredFieldLoader loader, BlockLoader.Builder builder) throws IOException { + ((BytesRefBuilder) builder).appendBytesRef(toBytesRef(scratch, loader.id())); + } + + @Override + public String toString() { + return
"BlockStoredFieldsReader.Id"; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 54961f2f489bf..1fb3f706c56a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -255,6 +255,14 @@ public Boolean valueForDisplay(Object value) { }; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return BlockDocValuesReader.booleans(name()); + } + return BlockSourceReader.booleans(sourceValueFetcher(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..b59df56791fbe --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptBlockDocValuesReader.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.script.BooleanFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for {@code boolean} scripts. 
+ */ +public class BooleanScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(BooleanFieldScript.LeafFactory factory) { + return context -> new BooleanScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final BooleanFieldScript script; + private int docId; + + BooleanScriptBlockDocValuesReader(BooleanFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.BooleanBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + // Note that we emit falses before trues so we conform to the doc values contract and can use booleansFromDocValues + return factory.booleansFromDocValues(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.BooleanBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.BooleanBuilder) builder); + } + + private void read(int docId, BlockLoader.BooleanBuilder builder) { + script.runForDoc(docId); + int total = script.falses() + script.trues(); + switch (total) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendBoolean(script.trues() > 0); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < script.falses(); i++) { + builder.appendBoolean(false); + } + for (int i = 0; i < script.trues(); i++) { + builder.appendBoolean(true); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptBooleans"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java index fcdf733b0febc..6e3876644567f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanScriptFieldType.java @@ -110,6 +110,11 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { return DocValueFormat.BOOLEAN; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return BooleanScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } + @Override public BooleanScriptFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new BooleanScriptFieldData.Builder(name(), leafFactory(fieldDataContext.lookupSupplier().get()), BooleanDocValuesField::new); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 581c87d224f2d..2859d8bb29917 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -211,7 +212,7 @@ public
CompletionFieldMapper build(MapperBuilderContext context) { private void checkCompletionContextsLimit() { if (this.contexts.getValue() != null && this.contexts.getValue().size() > COMPLETION_CONTEXTS_LIMIT) { - if (indexVersionCreated.onOrAfter(IndexVersion.V_8_0_0)) { + if (indexVersionCreated.onOrAfter(IndexVersions.V_8_0_0)) { throw new IllegalArgumentException( "Limit of completion field contexts [" + COMPLETION_CONTEXTS_LIMIT + "] has been exceeded" ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java b/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java index 1386028b4ea5b..e90e2bbe1afd0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ContentPath.java @@ -8,35 +8,42 @@ package org.elasticsearch.index.mapper; -import java.util.Stack; - public final class ContentPath { private static final char DELIMITER = '.'; private final StringBuilder sb; - private final Stack delimiterIndexes; + + private int index = 0; + + private String[] path = new String[10]; + private boolean withinLeafObject = false; public ContentPath() { this.sb = new StringBuilder(); - this.delimiterIndexes = new Stack<>(); } - public void add(String name) { - // Store the location of the previous final delimiter onto the stack, - // which will be the index of the 2nd last delimiter after appending the new name - delimiterIndexes.add(sb.length() - 1); - sb.append(name).append(DELIMITER); + String[] getPath() { + // used for testing + return path; } - public void remove() { - if (delimiterIndexes.isEmpty()) { - throw new IllegalStateException("Content path is empty"); + public void add(String name) { + path[index++] = name; + if (index == path.length) { // expand if needed + expand(); } + } + + private void expand() { + String[] newPath = new String[path.length + 10]; + System.arraycopy(path, 0, newPath, 0, path.length); + path = newPath; + } - // Deletes the last node added to the stringbuilder by deleting from the 2nd last delimiter onwards - sb.setLength(delimiterIndexes.pop() + 1); + public void remove() { + path[--index] = null; } public void setWithinLeafObject(boolean withinLeafObject) { @@ -48,16 +55,15 @@ public boolean isWithinLeafObject() { } public String pathAsText(String name) { - // If length is 0 we know that we are at the root, so return the provided string directly - if (length() == 0) { - return name; + sb.setLength(0); + for (int i = 0; i < index; i++) { + sb.append(path[i]).append(DELIMITER); } - - return sb + name; + sb.append(name); + return sb.toString(); } public int length() { - // The amount of delimiters we've added tells us the amount of nodes that have been added to the path - return delimiterIndexes.size(); + return index; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index cd71c80cdb8ed..21b9ec04c56c0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -34,6 +34,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import 
org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; @@ -326,7 +327,7 @@ private Long parseNullValue(DateFieldType fieldType) { try { return fieldType.parse(nullValue.getValue()); } catch (Exception e) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_8_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { throw new MapperParsingException("Error parsing [null_value] on field [" + name() + "]: " + e.getMessage(), e); } else { DEPRECATION_LOGGER.warn( @@ -772,6 +773,14 @@ public Function pointReaderIfPossible() { return null; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return BlockDocValuesReader.longs(name()); + } + return BlockSourceReader.longs(sourceValueFetcher(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..ad630a71870a4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptBlockDocValuesReader.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.script.DateFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for date scripts. 
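The comment in the date reader below ("we don't pre-sort our output so we can't use longsFromDocValues") captures the key constraint: the FromDocValues builders may assume each position's values arrive deduplicated and ascending, and script output offers no such guarantee. A small illustration of the property being assumed (the helper name is made up for the example):

    public class DocValuesContractSketch {
        /** True if values are strictly ascending, i.e. sorted and deduplicated. */
        static boolean satisfiesDocValuesContract(long[] valuesForOneDoc) {
            for (int i = 1; i < valuesForOneDoc.length; i++) {
                if (valuesForOneDoc[i] <= valuesForOneDoc[i - 1]) {
                    return false;
                }
            }
            return true;
        }

        public static void main(String[] args) {
            // Doc values always hand values back like this:
            System.out.println(satisfiesDocValuesContract(new long[] { 3, 7, 12 }));  // true
            // A script may emit in any order and with duplicates, so the reader
            // must ask the factory for the unconstrained longs(...) builder:
            System.out.println(satisfiesDocValuesContract(new long[] { 12, 3, 3 })); // false
        }
    }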
+ */ +public class DateScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(DateFieldScript.LeafFactory factory) { + return context -> new DateScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final DateFieldScript script; + private int docId; + + DateScriptBlockDocValuesReader(DateFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.LongBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.longs(expectedCount); // Note that we don't pre-sort our output so we can't use longsFromDocValues + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.LongBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.LongBuilder) builder); + } + + private void read(int docId, BlockLoader.LongBuilder builder) { + script.runForDoc(docId); + switch (script.count()) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendLong(script.values()[0]); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < script.count(); i++) { + builder.appendLong(script.values()[i]); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptDates"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java index 10c68e023baa2..8252d571dce68 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateScriptFieldType.java @@ -179,6 +179,11 @@ public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) { return new DocValueFormat.DateTime(dateTimeFormatter, timeZone, Resolution.MILLISECONDS); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return DateScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } + @Override public DateScriptFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new DateScriptFieldData.Builder( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 02d4aa7a756e0..f7dc09cdbb370 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.util.List; @@ -52,7 +53,7 @@ public static DocumentMapper createEmpty(MapperService mapperService) { boolean isSyntheticSourceMalformed(CompressedXContent source, IndexVersion version) { return sourceMapper().isSynthetic() && source.string().contains("\"_source\":{\"mode\":\"synthetic\"}") == false - && version.onOrBefore(IndexVersion.V_8_10_0); + && version.onOrBefore(IndexVersions.V_8_10_0); } public Mapping mapping() { diff --git 
a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index 16bd3967da57e..996c6243064e9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; @@ -49,7 +50,7 @@ */ public final class DocumentParser { - public static final IndexVersion DYNAMICALLY_MAP_DENSE_VECTORS_INDEX_VERSION = IndexVersion.FIRST_DETACHED_INDEX_VERSION; + public static final IndexVersion DYNAMICALLY_MAP_DENSE_VECTORS_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; private final XContentParserConfiguration parserConfiguration; private final Supplier documentParsingObserverSupplier; @@ -823,6 +824,11 @@ private static class NoOpObjectMapper extends ObjectMapper { NoOpObjectMapper(String name, String fullPath) { super(name, fullPath, Explicit.IMPLICIT_TRUE, Explicit.IMPLICIT_TRUE, Dynamic.RUNTIME, Collections.emptyMap()); } + + @Override + public ObjectMapper merge(Mapper mergeWith, MapperBuilderContext mapperBuilderContext) { + return this; + } } /** diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java b/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java index 0d06eabb4f19b..6cf44ba6bc447 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DotExpandingXContentParser.java @@ -172,34 +172,60 @@ protected XContentParser delegate() { return parsers.peek(); } + /* + The following methods (map* and list*) are known not to be called by DocumentParser when parsing documents, but we support indexing + percolator queries which are also parsed through DocumentParser, and their parsing code is completely up to each query, and queries are + also pluggable. That means that this parser needs to fully support parsing arbitrary content, when dots expansion is turned off. + We do throw UnsupportedOperationException when dots expansion is enabled as we don't expect such methods ever to be called in + those circumstances.
+ */ + @Override public Map<String, Object> map() throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.map(); + } throw new UnsupportedOperationException(); } @Override public Map<String, Object> mapOrdered() throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.mapOrdered(); + } throw new UnsupportedOperationException(); } @Override public Map<String, String> mapStrings() throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.mapStrings(); + } throw new UnsupportedOperationException(); } @Override public <T> Map<String, T> map(Supplier<Map<String, T>> mapFactory, CheckedFunction<XContentParser, T, IOException> mapValueParser) throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.map(mapFactory, mapValueParser); + } throw new UnsupportedOperationException(); } @Override public List<Object> list() throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.list(); + } throw new UnsupportedOperationException(); } @Override public List<Object> listOrderedMap() throws IOException { + if (contentPath.isWithinLeafObject()) { + return super.listOrderedMap(); + } throw new UnsupportedOperationException(); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..4e317a3ed11cb --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptBlockDocValuesReader.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.script.DoubleFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for {@code double} scripts.
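The DotExpandingXContentParser overrides above all share one guard-then-delegate shape: bulk map*/list* reads pass through to the wrapped parser only while within a leaf object (dots expansion off), and fail fast otherwise. Sketched generically (Parser and DotGuardedParser are invented stand-ins, not ES types):

    import java.util.List;
    import java.util.Map;

    public class GuardedDelegationSketch {
        interface Parser {
            Map<String, Object> map();

            List<Object> list();
        }

        /** Wraps a Parser; bulk reads are only legal while inside a leaf object. */
        static class DotGuardedParser implements Parser {
            private final Parser delegate;
            private boolean withinLeafObject;

            DotGuardedParser(Parser delegate) {
                this.delegate = delegate;
            }

            void setWithinLeafObject(boolean withinLeafObject) {
                this.withinLeafObject = withinLeafObject;
            }

            @Override
            public Map<String, Object> map() {
                if (withinLeafObject) {
                    return delegate.map(); // dots expansion off: arbitrary content is fine
                }
                throw new UnsupportedOperationException("bulk map() while expanding dots");
            }

            @Override
            public List<Object> list() {
                if (withinLeafObject) {
                    return delegate.list();
                }
                throw new UnsupportedOperationException("bulk list() while expanding dots");
            }
        }

        public static void main(String[] args) {
            Parser inner = new Parser() {
                public Map<String, Object> map() { return Map.of("a", 1); }

                public List<Object> list() { return List.of(1, 2); }
            };
            DotGuardedParser guarded = new DotGuardedParser(inner);
            guarded.setWithinLeafObject(true);
            System.out.println(guarded.map()); // {a=1}
        }
    }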
+ */ +public class DoubleScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(DoubleFieldScript.LeafFactory factory) { + return context -> new DoubleScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final DoubleFieldScript script; + private int docId; + + DoubleScriptBlockDocValuesReader(DoubleFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.DoubleBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.doubles(expectedCount); // Note that we don't pre-sort our output so we can't use doublesFromDocValues + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.DoubleBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.DoubleBuilder) builder); + } + + private void read(int docId, BlockLoader.DoubleBuilder builder) { + script.runForDoc(docId); + switch (script.count()) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendDouble(script.values()[0]); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < script.count(); i++) { + builder.appendDouble(script.values()[i]); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptDoubles"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java index 9baa6340b45c7..ef5c112ef212a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DoubleScriptFieldType.java @@ -105,6 +105,11 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { return new DocValueFormat.Decimal(format); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return DoubleScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } + @Override public DoubleScriptFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new DoubleScriptFieldData.Builder(name(), leafFactory(fieldDataContext.lookupSupplier().get()), DoubleDocValuesField::new); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 350ac22c5e216..974d935a35e50 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -53,7 +54,7 @@ import static org.elasticsearch.core.Strings.format; -public abstract class FieldMapper extends Mapper implements Cloneable { +public abstract class FieldMapper extends Mapper { private static final Logger logger = 
LogManager.getLogger(FieldMapper.class); public static final Setting IGNORE_MALFORMED_SETTING = Setting.boolSetting( @@ -1310,7 +1311,7 @@ public final void parse(String name, MappingParserContext parserContext, Map { - if (parserContext.indexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { throw new MapperParsingException("Unknown parameter [boost] on mapper [" + name + "]"); } deprecationLogger.warn( @@ -1354,7 +1355,7 @@ public final void parse(String name, MappingParserContext parserContext, Map DEPRECATED_PARAMS = Set.of("store", "meta", "index", "doc_values", "index_options", "similarity"); private static boolean isDeprecatedParameter(String propName, IndexVersion indexCreatedVersion) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_8_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { return false; } return DEPRECATED_PARAMS.contains(propName); @@ -1430,7 +1431,7 @@ public static final class TypeParser implements Mapper.TypeParser { * @param builderFunction a function that produces a Builder from a name and parsercontext */ public TypeParser(BiFunction builderFunction) { - this(builderFunction, (n, c) -> {}, IndexVersion.MINIMUM_COMPATIBLE); + this(builderFunction, (n, c) -> {}, IndexVersions.MINIMUM_COMPATIBLE); } /** @@ -1445,7 +1446,7 @@ public TypeParser( BiFunction builderFunction, BiConsumer contextValidator ) { - this(builderFunction, contextValidator, IndexVersion.MINIMUM_COMPATIBLE); + this(builderFunction, contextValidator, IndexVersions.MINIMUM_COMPATIBLE); } private TypeParser( diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index cefd31efb49dd..57a4885d44d5a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collections; @@ -64,7 +65,7 @@ static class Builder extends MetadataFieldMapper.Builder { private final boolean createdOnOrAfterV8; Builder(IndexVersion indexVersionCreated) { - this(indexVersionCreated.onOrAfter(IndexVersion.V_8_0_0)); + this(indexVersionCreated.onOrAfter(IndexVersions.V_8_0_0)); } Builder(boolean createdOnOrAfterV8) { @@ -101,7 +102,7 @@ public FieldNamesFieldMapper build() { private static final FieldNamesFieldMapper DEFAULT_OLD = new FieldNamesFieldMapper(Defaults.ENABLED, false); public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> c.indexVersionCreated().onOrAfter(IndexVersion.V_8_0_0) ? DEFAULT : DEFAULT_OLD, + c -> c.indexVersionCreated().onOrAfter(IndexVersions.V_8_0_0) ? 
DEFAULT : DEFAULT_OLD, c -> new Builder(c.indexVersionCreated()) ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index ad287e1c6b005..e39684705e26a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -18,7 +18,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -131,7 +131,7 @@ public String typeName() { @Override public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersion.V_7_5_0)) { + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { throw new QueryShardException( context, ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index c24b3077f700c..5f987fd96ca66 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -10,6 +10,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -77,6 +78,46 @@ public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext ); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + // TODO build a constant block directly + BytesRef bytes = new BytesRef(blContext.indexName()); + return context -> new BlockDocValuesReader() { + private int docId; + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + builder.appendBytesRef(bytes); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(bytes); + } + + @Override + public String toString() { + return "Index"; + } + }; + } + @Override public ValueFetcher valueFetcher(SearchExecutionContext context, String format) { return new ValueFetcher() { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java 
b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 80810ee0d7ab4..7d6b7711360fe 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; @@ -130,7 +131,7 @@ private InetAddress parseNullValue() { try { return InetAddresses.forString(nullValueAsString); } catch (Exception e) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_8_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { throw new MapperParsingException("Error parsing [null_value] on field [" + name() + "]: " + e.getMessage(), e); } else { DEPRECATION_LOGGER.warn( @@ -405,6 +406,14 @@ public static Query rangeQuery( return builder.apply(lower, upper); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return BlockDocValuesReader.bytesRefsFromOrds(name()); + } + return null; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..23229a6533cdb --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptBlockDocValuesReader.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.script.IpFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for ip scripts.
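The BytesRefs flowing through the ip loaders hold Lucene's fixed 16-byte encoded form of the address, the encoding InetAddressPoint uses (IPv4 addresses are stored IPv6-mapped). The round-trip in plain Lucene, for reference:

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    import org.apache.lucene.document.InetAddressPoint;
    import org.apache.lucene.util.BytesRef;

    public class IpEncodingSketch {
        public static void main(String[] args) throws UnknownHostException {
            InetAddress address = InetAddress.getByName("192.168.0.1");
            // Always 16 bytes, regardless of IPv4 or IPv6 input
            byte[] encoded = InetAddressPoint.encode(address);
            BytesRef ref = new BytesRef(encoded);
            System.out.println(ref.length);                       // 16
            System.out.println(InetAddressPoint.decode(encoded)); // /192.168.0.1
        }
    }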
+ */ +public class IpScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(IpFieldScript.LeafFactory factory) { + return context -> new IpScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final IpFieldScript script; + private int docId; + + IpScriptBlockDocValuesReader(IpFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); // Note that we don't pre-sort our output so we can't use bytesRefsFromDocValues + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.BytesRefBuilder) builder); + } + + private void read(int docId, BlockLoader.BytesRefBuilder builder) { + script.runForDoc(docId); + switch (script.count()) { + case 0 -> builder.appendNull(); + case 1 -> { + builder.appendBytesRef(script.values()[0]); + } + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < script.count(); i++) { + builder.appendBytesRef(script.values()[i]); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptIps"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java index 693322506972f..0e56b30e2d5d9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpScriptFieldType.java @@ -208,4 +208,9 @@ private Query cidrQuery(String term, SearchExecutionContext context) { BytesRef upperBytes = new BytesRef(InetAddressPoint.encode(InetAddressPoint.decode(upper))); return new IpScriptFieldRangeQuery(script, leafFactory(context), name(), lowerBytes, upperBytes); } + + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return IpScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1e74f90ed7393..9bc3db22365de 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -578,6 +578,24 @@ NamedAnalyzer normalizer() { return normalizer; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + return BlockDocValuesReader.bytesRefsFromOrds(name()); + } + if (isSyntheticSource) { + if (false == isStored()) { + throw new IllegalStateException( + "keyword field [" + + name() + + "] is only supported in synthetic _source index if it creates doc values or stored fields" + ); + } + return BlockStoredFieldsReader.bytesRefsFromBytesRefs(name()); + } + return BlockSourceReader.bytesRefs(sourceValueFetcher(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext 
fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..6afbcae50d31f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptBlockDocValuesReader.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.script.StringFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for keyword scripts. + */ +public class KeywordScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(StringFieldScript.LeafFactory factory) { + return context -> new KeywordScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final BytesRefBuilder bytesBuild = new BytesRefBuilder(); + private final StringFieldScript script; + private int docId; + + KeywordScriptBlockDocValuesReader(StringFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); // Note that we don't pre-sort our output so we can't use bytesRefsFromDocValues + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.BytesRefBuilder) builder); + } + + private void read(int docId, BlockLoader.BytesRefBuilder builder) { + script.runForDoc(docId); + switch (script.getValues().size()) { + case 0 -> builder.appendNull(); + case 1 -> { + bytesBuild.copyChars(script.getValues().get(0)); + builder.appendBytesRef(bytesBuild.get()); + } + default -> { + builder.beginPositionEntry(); + for (String v : script.getValues()) { + bytesBuild.copyChars(v); + builder.appendBytesRef(bytesBuild.get()); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptKeywords"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java index fb498e7eb7dcd..879a28d4c76c8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordScriptFieldType.java @@ -110,6 +110,11 @@ public Object valueForDisplay(Object value) { return binaryValue.utf8ToString(); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return KeywordScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } + 
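The Ip, Keyword and Long script readers above all emit values through the same position-oriented builder contract: one position per document, with explicit entry markers only when a document is multi-valued. A minimal sketch of the resulting builder calls, assuming the BlockLoader.BytesRefBuilder interface introduced in this change; the three branches mirror the switch in each reader's read method:

import org.apache.lucene.util.BytesRef;

// Illustration only: the builder-call sequence for three docs whose script
// returns ["a", "b"], nothing, and ["c"] respectively.
static void encodeExample(BlockLoader.BytesRefBuilder builder) {
    builder.beginPositionEntry();               // doc 0: two values -> one multi-valued position
    builder.appendBytesRef(new BytesRef("a"));
    builder.appendBytesRef(new BytesRef("b"));
    builder.endPositionEntry();
    builder.appendNull();                       // doc 1: script produced no values
    builder.appendBytesRef(new BytesRef("c"));  // doc 2: single value, no entry markers needed
}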
@Override public StringScriptFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new StringScriptFieldData.Builder(name(), leafFactory(fieldDataContext.lookupSupplier().get()), KeywordDocValuesField::new); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java new file mode 100644 index 0000000000000..91c099cd2813b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptBlockDocValuesReader.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.script.LongFieldScript; + +/** + * {@link BlockDocValuesReader} implementation for {@code long} scripts. + */ +public class LongScriptBlockDocValuesReader extends BlockDocValuesReader { + public static BlockLoader blockLoader(LongFieldScript.LeafFactory factory) { + return context -> new LongScriptBlockDocValuesReader(factory.newInstance(context)); + } + + private final LongFieldScript script; + private int docId; + + LongScriptBlockDocValuesReader(LongFieldScript script) { + this.script = script; + } + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.LongBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.longs(expectedCount); // Note that we don't pre-sort our output so we can't use longsFromDocValues + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.LongBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + read(docs.get(i), builder); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + read(docId, (BlockLoader.LongBuilder) builder); + } + + private void read(int docId, BlockLoader.LongBuilder builder) { + script.runForDoc(docId); + switch (script.count()) { + case 0 -> builder.appendNull(); + case 1 -> builder.appendLong(script.values()[0]); + default -> { + builder.beginPositionEntry(); + for (int i = 0; i < script.count(); i++) { + builder.appendLong(script.values()[i]); + } + builder.endPositionEntry(); + } + } + } + + @Override + public String toString() { + return "ScriptLongs"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java index 417d60533aad0..f89babe32d0a9 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LongScriptFieldType.java @@ -105,6 +105,11 @@ public DocValueFormat docValueFormat(String format, ZoneId timeZone) { return new DocValueFormat.Decimal(format); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return LongScriptBlockDocValuesReader.blockLoader(leafFactory(blContext.lookup())); + } + @Override public LongScriptFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return 
new LongScriptFieldData.Builder(name(), leafFactory(fieldDataContext.lookupSupplier().get()), LongDocValuesField::new); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 21ed56a82292c..b68bb1a2b1987 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.fetch.subphase.FetchFieldsPhase; +import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; import java.time.ZoneId; @@ -50,6 +51,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.function.Function; import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES; @@ -629,4 +631,33 @@ public void validateMatchedRoutingPath(String routingPath) { + "]." ); } + + /** + * Returns a loader for ESQL or {@code null} if the field doesn't support + * ESQL. + */ + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return null; + } + + /** + * Arguments for {@link #blockLoader}. + */ + public interface BlockLoaderContext { + /** + * The name of the index. + */ + String indexName(); + + /** + * {@link SearchLookup} used for building scripts. + */ + SearchLookup lookup(); + + /** + * Find the paths in {@code _source} that contain values for the field named {@code name}. + */ + Set sourcePaths(String name); + } + } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 598e8c4d394e8..e977b0aac014a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.StringLiteralDeduplicator; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.ToXContentFragment; import java.util.Map; @@ -45,7 +46,7 @@ public interface TypeParser { * Whether we can parse this type on indices with the given index created version. */ default boolean supportsVersion(IndexVersion indexCreatedVersion) { - return indexCreatedVersion.onOrAfter(IndexVersion.MINIMUM_COMPATIBLE); + return indexCreatedVersion.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java index 7fe99de6fea39..dcf24c9a61bbd 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperRegistry.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.MapperPlugin; import java.util.Collections; @@ -71,9 +72,9 @@ public Map getRuntimeFieldParsers() { * returned map uses the name of the field as a key. 
*/ public Map getMetadataMapperParsers(IndexVersion indexCreatedVersion) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_8_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_8_0_0)) { return metadataMapperParsers; - } else if (indexCreatedVersion.onOrAfter(IndexVersion.V_7_0_0)) { + } else if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { return metadataMapperParsers7x; } else if (indexCreatedVersion.onOrAfter(IndexVersion.fromId(6000099))) { return metadataMapperParsers6x; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java index 0ca3044af64b3..9b5027455c68c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MetadataFieldMapper.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -149,7 +149,7 @@ public final void parseMetadataField(String name, MappingParserContext parserCon Parameter parameter = paramsMap.get(propName); if (parameter == null) { if (UNSUPPORTED_PARAMETERS_8_6_0.contains(propName)) { - if (parserContext.indexVersionCreated().onOrAfter(IndexVersion.V_8_6_0)) { + if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_6_0)) { // silently ignore type, and a few other parameters: sadly we've been doing this for a long time deprecationLogger.warn( DeprecationCategory.API, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java index 3ff77e2192634..257b2270176bc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedObjectMapper.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.Explicit; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -62,7 +63,24 @@ public NestedObjectMapper build(MapperBuilderContext context) { } } NestedMapperBuilderContext nestedContext = new NestedMapperBuilderContext(context.buildFullName(name), parentIncludedInRoot); - return new NestedObjectMapper(name, context.buildFullName(name), buildMappers(nestedContext), this); + final String fullPath = context.buildFullName(name); + final String nestedTypePath; + if (indexCreatedVersion.before(IndexVersions.V_8_0_0)) { + nestedTypePath = "__" + fullPath; + } else { + nestedTypePath = fullPath; + } + return new NestedObjectMapper( + name, + fullPath, + buildMappers(nestedContext), + enabled, + dynamic, + includeInParent, + includeInRoot, + nestedTypePath, + NestedPathFieldMapper.filter(indexCreatedVersion, nestedTypePath) + ); } } @@ -110,21 +128,27 @@ public MapperBuilderContext createChildContext(String name) { } } - private Explicit includeInRoot; - private Explicit includeInParent; + private final Explicit includeInRoot; + private final Explicit includeInParent; private final String nestedTypePath; private final Query nestedTypeFilter; - 
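Stepping back to the MappedFieldType change a few hunks up: blockLoader(BlockLoaderContext) is the single hook ESQL uses to read a field columnarly, and the default of returning null simply opts a field out. A hedged sketch of a typical override, assuming a field type backed by long-valued doc values; MyFieldType and myFetcher are hypothetical, while the BlockDocValuesReader and BlockSourceReader factories are the ones added in this diff:

public class MyFieldType extends MappedFieldType {
    // constructor and the other required overrides elided

    @Override
    public BlockLoader blockLoader(BlockLoaderContext blContext) {
        if (hasDocValues()) {
            return BlockDocValuesReader.longs(name());  // columnar fast path over doc values
        }
        // myFetcher stands in for a SourceValueFetcher built over the
        // field's _source paths, the same fallback the number mappers use.
        return BlockSourceReader.longs(myFetcher(blContext.sourcePaths(name())));
    }
}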
NestedObjectMapper(String name, String fullPath, Map mappers, Builder builder) { - super(name, fullPath, builder.enabled, Explicit.IMPLICIT_TRUE, builder.dynamic, mappers); - if (builder.indexCreatedVersion.before(IndexVersion.V_8_0_0)) { - this.nestedTypePath = "__" + fullPath; - } else { - this.nestedTypePath = fullPath; - } - this.nestedTypeFilter = NestedPathFieldMapper.filter(builder.indexCreatedVersion, nestedTypePath); - this.includeInParent = builder.includeInParent; - this.includeInRoot = builder.includeInRoot; + NestedObjectMapper( + String name, + String fullPath, + Map mappers, + Explicit enabled, + ObjectMapper.Dynamic dynamic, + Explicit includeInParent, + Explicit includeInRoot, + String nestedTypePath, + Query nestedTypeFilter + ) { + super(name, fullPath, enabled, Explicit.IMPLICIT_TRUE, dynamic, mappers); + this.nestedTypePath = nestedTypePath; + this.nestedTypeFilter = nestedTypeFilter; + this.includeInParent = includeInParent; + this.includeInRoot = includeInRoot; } public Query nestedTypeFilter() { @@ -188,13 +212,15 @@ public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, Ma MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } NestedObjectMapper mergeWithObject = (NestedObjectMapper) mergeWith; - NestedObjectMapper toMerge = (NestedObjectMapper) clone(); + var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + Explicit incInParent = this.includeInParent; + Explicit incInRoot = this.includeInRoot; if (reason == MapperService.MergeReason.INDEX_TEMPLATE) { if (mergeWithObject.includeInParent.explicit()) { - toMerge.includeInParent = mergeWithObject.includeInParent; + incInParent = mergeWithObject.includeInParent; } if (mergeWithObject.includeInRoot.explicit()) { - toMerge.includeInRoot = mergeWithObject.includeInRoot; + incInRoot = mergeWithObject.includeInRoot; } } else { if (includeInParent.value() != mergeWithObject.includeInParent.value()) { @@ -205,16 +231,25 @@ public ObjectMapper merge(Mapper mergeWith, MapperService.MergeReason reason, Ma } } if (parentBuilderContext instanceof NestedMapperBuilderContext nc) { - if (nc.parentIncludedInRoot && toMerge.includeInParent.value()) { - toMerge.includeInRoot = Explicit.IMPLICIT_FALSE; + if (nc.parentIncludedInRoot && incInParent.value()) { + incInRoot = Explicit.IMPLICIT_FALSE; } } else { - if (toMerge.includeInParent.value()) { - toMerge.includeInRoot = Explicit.IMPLICIT_FALSE; + if (incInParent.value()) { + incInRoot = Explicit.IMPLICIT_FALSE; } } - toMerge.doMerge(mergeWithObject, reason, parentBuilderContext); - return toMerge; + return new NestedObjectMapper( + simpleName(), + fullPath(), + mergeResult.mappers(), + mergeResult.enabled(), + mergeResult.dynamic(), + incInParent, + incInRoot, + nestedTypePath, + nestedTypeFilter + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java index 721b4ada67590..6a0f4a87eb890 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NestedPathFieldMapper.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collections; @@ -29,7 +30,7 @@ public class 
NestedPathFieldMapper extends MetadataFieldMapper { private static final NestedPathFieldMapper INSTANCE_PRE_V8 = new NestedPathFieldMapper(NAME_PRE_V8); public static String name(IndexVersion version) { - if (version.before(IndexVersion.V_8_0_0)) { + if (version.before(IndexVersions.V_8_0_0)) { return NAME_PRE_V8; } return NAME; @@ -44,7 +45,7 @@ public static Field field(IndexVersion version, String path) { } public static final TypeParser PARSER = new FixedTypeParser( - c -> c.indexVersionCreated().before(IndexVersion.V_8_0_0) ? INSTANCE_PRE_V8 : INSTANCE + c -> c.indexVersionCreated().before(IndexVersions.V_8_0_0) ? INSTANCE_PRE_V8 : INSTANCE ); public static final class NestedPathFieldType extends StringFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 4df3b9cf02985..7dfc5a98037d0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -438,6 +438,16 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { } }; } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.doubles(fieldName, l -> HalfFloatPoint.sortableShortToHalfFloat((short) l)); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.doubles(sourceValueFetcher); + } }, FLOAT("float", NumericType.FLOAT) { @Override @@ -590,6 +600,16 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { } }; } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.doubles(fieldName, l -> NumericUtils.sortableIntToFloat((int) l)); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.doubles(sourceValueFetcher); + } }, DOUBLE("double", NumericType.DOUBLE) { @Override @@ -720,6 +740,16 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { } }; } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.doubles(fieldName, NumericUtils::sortableLongToDouble); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.doubles(sourceValueFetcher); + } }, BYTE("byte", NumericType.BYTE) { @Override @@ -813,6 +843,16 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String fieldName, String fieldSimpleName, boolean ignoreMalformed) { return NumberType.syntheticLongFieldLoader(fieldName, fieldSimpleName, ignoreMalformed); } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.ints(fieldName); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.ints(sourceValueFetcher); + } }, SHORT("short", NumericType.SHORT) { @Override @@ -902,6 +942,16 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String fieldName, String fieldSimpleName, boolean ignoreMalformed) { return NumberType.syntheticLongFieldLoader(fieldName, fieldSimpleName, ignoreMalformed); } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return 
BlockDocValuesReader.ints(fieldName); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.ints(sourceValueFetcher); + } }, INTEGER("integer", NumericType.INT) { @Override @@ -1059,6 +1109,16 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String fieldName, String fieldSimpleName, boolean ignoreMalformed) { return NumberType.syntheticLongFieldLoader(fieldName, fieldSimpleName, ignoreMalformed); } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.ints(fieldName); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.ints(sourceValueFetcher); + } }, LONG("long", NumericType.LONG) { @Override @@ -1186,6 +1246,16 @@ public IndexFieldData.Builder getValueFetcherFieldDataBuilder( SourceLoader.SyntheticFieldLoader syntheticFieldLoader(String fieldName, String fieldSimpleName, boolean ignoreMalformed) { return syntheticLongFieldLoader(fieldName, fieldSimpleName, ignoreMalformed); } + + @Override + BlockLoader blockLoaderFromDocValues(String fieldName) { + return BlockDocValuesReader.longs(fieldName); + } + + @Override + BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher) { + return BlockSourceReader.longs(sourceValueFetcher); + } }; private final String name; @@ -1449,6 +1519,10 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { } }; } + + abstract BlockLoader blockLoaderFromDocValues(String fieldName); + + abstract BlockLoader blockLoaderFromSource(SourceValueFetcher sourceValueFetcher); } public static class NumberFieldType extends SimpleMappedFieldType { @@ -1579,6 +1653,18 @@ public Function pointReaderIfPossible() { return null; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (indexMode == IndexMode.TIME_SERIES && metricType == TimeSeriesParams.MetricType.COUNTER) { + // Counters are not supported by ESQL so we load them in null + return BlockDocValuesReader.nulls(); + } + if (hasDocValues()) { + return type.blockLoaderFromDocValues(name()); + } + return type.blockLoaderFromSource(sourceValueFetcher(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 6bc82cff20e58..d67763879433f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -33,7 +34,7 @@ import java.util.Set; import java.util.stream.Stream; -public class ObjectMapper extends Mapper implements Cloneable { +public class ObjectMapper extends Mapper { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final String 
CONTENT_TYPE = "object"; @@ -361,7 +362,7 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate if (fieldName.isEmpty()) { throw new IllegalArgumentException("field name cannot be an empty string"); } - if (fieldName.isBlank() & indexCreatedVersion.onOrAfter(IndexVersion.V_8_6_0)) { + if (fieldName.isBlank() & indexCreatedVersion.onOrAfter(IndexVersions.V_8_6_0)) { // blank field names were previously accepted in mappings, but not in documents. throw new IllegalArgumentException("field name cannot contain only whitespaces"); } @@ -369,11 +370,11 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate private final String fullPath; - protected Explicit enabled; - protected Explicit subobjects; - protected volatile Dynamic dynamic; + protected final Explicit enabled; + protected final Explicit subobjects; + protected final Dynamic dynamic; - protected Map mappers; + protected final Map mappers; ObjectMapper( String name, @@ -397,18 +398,6 @@ private static void validateFieldName(String fieldName, IndexVersion indexCreate } } - @Override - protected ObjectMapper clone() { - ObjectMapper clone; - try { - clone = (ObjectMapper) super.clone(); - } catch (CloneNotSupportedException e) { - throw new RuntimeException(e); - } - clone.mappers = Map.copyOf(clone.mappers); - return clone; - } - /** * @return a Builder that will produce an empty ObjectMapper with the same configuration as this one */ @@ -475,73 +464,116 @@ protected MapperBuilderContext createChildContext(MapperBuilderContext mapperBui } public ObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - if ((mergeWith instanceof ObjectMapper) == false) { - MapperErrors.throwObjectMappingConflictError(mergeWith.name()); - } - if (mergeWith instanceof NestedObjectMapper) { - // TODO stop NestedObjectMapper extending ObjectMapper? 
- MapperErrors.throwNestedMappingConflictError(mergeWith.name()); - } - ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; - ObjectMapper merged = clone(); - merged.doMerge(mergeWithObject, reason, parentBuilderContext); - return merged; + var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + return new ObjectMapper( + simpleName(), + fullPath, + mergeResult.enabled, + mergeResult.subObjects, + mergeResult.dynamic, + mergeResult.mappers + ); } - protected void doMerge(final ObjectMapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - if (mergeWith.dynamic != null) { - this.dynamic = mergeWith.dynamic; - } + protected record MergeResult( + Explicit enabled, + Explicit subObjects, + ObjectMapper.Dynamic dynamic, + Map mappers + ) { - if (mergeWith.enabled.explicit()) { - if (reason == MergeReason.INDEX_TEMPLATE) { - this.enabled = mergeWith.enabled; - } else if (isEnabled() != mergeWith.isEnabled()) { - throw new MapperException("the [enabled] parameter can't be updated for the object mapping [" + name() + "]"); + public static MergeResult build( + ObjectMapper existing, + Mapper mergeWith, + MergeReason reason, + MapperBuilderContext parentBuilderContext + ) { + if ((mergeWith instanceof ObjectMapper) == false) { + MapperErrors.throwObjectMappingConflictError(mergeWith.name()); } - } - - if (mergeWith.subobjects.explicit()) { - if (reason == MergeReason.INDEX_TEMPLATE) { - this.subobjects = mergeWith.subobjects; - } else if (subobjects != mergeWith.subobjects) { - throw new MapperException("the [subobjects] parameter can't be updated for the object mapping [" + name() + "]"); + if (existing instanceof NestedObjectMapper == false && mergeWith instanceof NestedObjectMapper) { + // TODO stop NestedObjectMapper extending ObjectMapper? + MapperErrors.throwNestedMappingConflictError(mergeWith.name()); } - } - - MapperBuilderContext objectBuilderContext = createChildContext(parentBuilderContext, simpleName()); - Map mergedMappers = null; - for (Mapper mergeWithMapper : mergeWith) { - Mapper mergeIntoMapper = (mergedMappers == null ? 
mappers : mergedMappers).get(mergeWithMapper.simpleName()); - - Mapper merged; - if (mergeIntoMapper == null) { - merged = mergeWithMapper; - } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { - merged = objectMapper.merge(mergeWithMapper, reason, objectBuilderContext); + ObjectMapper mergeWithObject = (ObjectMapper) mergeWith; + final Explicit enabled; + if (mergeWithObject.enabled.explicit()) { + if (reason == MergeReason.INDEX_TEMPLATE) { + enabled = mergeWithObject.enabled; + } else if (existing.isEnabled() != mergeWithObject.isEnabled()) { + throw new MapperException("the [enabled] parameter can't be updated for the object mapping [" + existing.name() + "]"); + } else { + enabled = existing.enabled; + } } else { - assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; - if (mergeWithMapper instanceof NestedObjectMapper) { - MapperErrors.throwNestedMappingConflictError(mergeWithMapper.name()); - } else if (mergeWithMapper instanceof ObjectMapper) { - MapperErrors.throwObjectMappingConflictError(mergeWithMapper.name()); + enabled = existing.enabled; + } + final Explicit subObjects; + if (mergeWithObject.subobjects.explicit()) { + if (reason == MergeReason.INDEX_TEMPLATE) { + subObjects = mergeWithObject.subobjects; + } else if (existing.subobjects != mergeWithObject.subobjects) { + throw new MapperException( + "the [subobjects] parameter can't be updated for the object mapping [" + existing.name() + "]" + ); + } else { + subObjects = existing.subobjects; } + } else { + subObjects = existing.subobjects; + } + MapperBuilderContext objectBuilderContext = existing.createChildContext(parentBuilderContext, existing.simpleName()); + Map mergedMappers = buildMergedMappers(existing, mergeWith, reason, objectBuilderContext); + return new MergeResult( + enabled, + subObjects, + mergeWithObject.dynamic != null ? mergeWithObject.dynamic : existing.dynamic, + mergedMappers + ); + } - // If we're merging template mappings when creating an index, then a field definition always - // replaces an existing one. - if (reason == MergeReason.INDEX_TEMPLATE) { + private static Map buildMergedMappers( + ObjectMapper existing, + Mapper mergeWith, + MergeReason reason, + MapperBuilderContext objectBuilderContext + ) { + Map mergedMappers = null; + for (Mapper mergeWithMapper : mergeWith) { + Mapper mergeIntoMapper = (mergedMappers == null ? existing.mappers : mergedMappers).get(mergeWithMapper.simpleName()); + + Mapper merged; + if (mergeIntoMapper == null) { merged = mergeWithMapper; + } else if (mergeIntoMapper instanceof ObjectMapper objectMapper) { + merged = objectMapper.merge(mergeWithMapper, reason, objectBuilderContext); } else { - merged = mergeIntoMapper.merge(mergeWithMapper, objectBuilderContext); + assert mergeIntoMapper instanceof FieldMapper || mergeIntoMapper instanceof FieldAliasMapper; + if (mergeWithMapper instanceof NestedObjectMapper) { + MapperErrors.throwNestedMappingConflictError(mergeWithMapper.name()); + } else if (mergeWithMapper instanceof ObjectMapper) { + MapperErrors.throwObjectMappingConflictError(mergeWithMapper.name()); + } + + // If we're merging template mappings when creating an index, then a field definition always + // replaces an existing one. 
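That comment is the crux of the conflict rule buildMergedMappers preserves: when templates are composed at index-creation time, a later field definition replaces an earlier one outright, while a live mapping update must merge the two definitions and fails if they are incompatible. A worked illustration, as comments (the error text is approximate):

// Template A maps "price" as integer; template B, applied later, maps it as double.
// - Composing the templates (MergeReason.INDEX_TEMPLATE): B wins, "price" is double.
// - Submitting B's mapping as an update to a live index instead goes through
//   Mapper#merge and is rejected with something like:
//   "mapper [price] cannot be changed from type [integer] to [double]"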
+ if (reason == MergeReason.INDEX_TEMPLATE) { + merged = mergeWithMapper; + } else { + merged = mergeIntoMapper.merge(mergeWithMapper, objectBuilderContext); + } + } + if (mergedMappers == null) { + mergedMappers = new HashMap<>(existing.mappers); } + mergedMappers.put(merged.simpleName(), merged); } - if (mergedMappers == null) { - mergedMappers = new HashMap<>(mappers); + if (mergedMappers != null) { + mergedMappers = Map.copyOf(mergedMappers); + } else { + mergedMappers = Map.copyOf(existing.mappers); } - mergedMappers.put(merged.simpleName(), merged); - } - if (mergedMappers != null) { - mappers = Map.copyOf(mergedMappers); + return mergedMappers; } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java index e2821189f0564..f681d54ebbead 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ProvidedIdFieldMapper.java @@ -117,6 +117,11 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { return new TermInSetQuery(name(), bytesRefs); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return BlockStoredFieldsReader.id(); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { if (fieldDataEnabled.getAsBoolean() == false) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java index 55c6a4537f688..65fce1b69b8cc 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/RootObjectMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.xcontent.ToXContent; @@ -111,7 +112,7 @@ public RootObjectMapper build(MapperBuilderContext context) { subobjects, dynamic, buildMappers(context), - runtimeFields, + new HashMap<>(runtimeFields), dynamicDateTimeFormatters, dynamicTemplates, dateDetection, @@ -120,11 +121,11 @@ public RootObjectMapper build(MapperBuilderContext context) { } } - private Explicit dynamicDateTimeFormatters; - private Explicit dateDetection; - private Explicit numericDetection; - private Explicit dynamicTemplates; - private Map runtimeFields; + private final Explicit dynamicDateTimeFormatters; + private final Explicit dateDetection; + private final Explicit numericDetection; + private final Explicit dynamicTemplates; + private final Map runtimeFields; RootObjectMapper( String name, @@ -146,13 +147,6 @@ public RootObjectMapper build(MapperBuilderContext context) { this.numericDetection = numericDetection; } - @Override - protected ObjectMapper clone() { - ObjectMapper clone = super.clone(); - ((RootObjectMapper) clone).runtimeFields = new HashMap<>(this.runtimeFields); - return clone; - } - @Override public RootObjectMapper.Builder newBuilder(IndexVersion indexVersionCreated) { RootObjectMapper.Builder builder = new RootObjectMapper.Builder(name(), subobjects); @@ -205,25 +199,30 @@ protected MapperBuilderContext 
createChildContext(MapperBuilderContext mapperBui @Override public RootObjectMapper merge(Mapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - return (RootObjectMapper) super.merge(mergeWith, reason, parentBuilderContext); - } - - @Override - protected void doMerge(ObjectMapper mergeWith, MergeReason reason, MapperBuilderContext parentBuilderContext) { - super.doMerge(mergeWith, reason, parentBuilderContext); + final var mergeResult = MergeResult.build(this, mergeWith, reason, parentBuilderContext); + final Explicit numericDetection; RootObjectMapper mergeWithObject = (RootObjectMapper) mergeWith; if (mergeWithObject.numericDetection.explicit()) { - this.numericDetection = mergeWithObject.numericDetection; + numericDetection = mergeWithObject.numericDetection; + } else { + numericDetection = this.numericDetection; } + final Explicit dateDetection; if (mergeWithObject.dateDetection.explicit()) { - this.dateDetection = mergeWithObject.dateDetection; + dateDetection = mergeWithObject.dateDetection; + } else { + dateDetection = this.dateDetection; } + final Explicit dynamicDateTimeFormatters; if (mergeWithObject.dynamicDateTimeFormatters.explicit()) { - this.dynamicDateTimeFormatters = mergeWithObject.dynamicDateTimeFormatters; + dynamicDateTimeFormatters = mergeWithObject.dynamicDateTimeFormatters; + } else { + dynamicDateTimeFormatters = this.dynamicDateTimeFormatters; } + final Explicit dynamicTemplates; if (mergeWithObject.dynamicTemplates.explicit()) { if (reason == MergeReason.INDEX_TEMPLATE) { Map templatesByKey = new LinkedHashMap<>(); @@ -235,19 +234,35 @@ protected void doMerge(ObjectMapper mergeWith, MergeReason reason, MapperBuilder } DynamicTemplate[] mergedTemplates = templatesByKey.values().toArray(new DynamicTemplate[0]); - this.dynamicTemplates = new Explicit<>(mergedTemplates, true); + dynamicTemplates = new Explicit<>(mergedTemplates, true); } else { - this.dynamicTemplates = mergeWithObject.dynamicTemplates; + dynamicTemplates = mergeWithObject.dynamicTemplates; } + } else { + dynamicTemplates = this.dynamicTemplates; } + final Map runtimeFields = new HashMap<>(this.runtimeFields); assert this.runtimeFields != mergeWithObject.runtimeFields; for (Map.Entry runtimeField : mergeWithObject.runtimeFields.entrySet()) { if (runtimeField.getValue() == null) { - this.runtimeFields.remove(runtimeField.getKey()); + runtimeFields.remove(runtimeField.getKey()); } else { - this.runtimeFields.put(runtimeField.getKey(), runtimeField.getValue()); + runtimeFields.put(runtimeField.getKey(), runtimeField.getValue()); } } + + return new RootObjectMapper( + simpleName(), + mergeResult.enabled(), + mergeResult.subObjects(), + mergeResult.dynamic(), + mergeResult.mappers(), + Map.copyOf(runtimeFields), + dynamicDateTimeFormatters, + dynamicTemplates, + dateDetection, + numericDetection + ); } @Override @@ -334,7 +349,7 @@ private static void validateDynamicTemplate(MappingParserContext parserContext, String format = "dynamic template [%s] has invalid content [%s], " + "attempted to validate it with the following match_mapping_type: %s"; String message = String.format(Locale.ROOT, format, template.getName(), Strings.toString(template), Arrays.toString(types)); - final boolean failInvalidDynamicTemplates = parserContext.indexVersionCreated().onOrAfter(IndexVersion.V_8_0_0); + final boolean failInvalidDynamicTemplates = parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_0_0); if (failInvalidDynamicTemplates) { throw new IllegalArgumentException(message, 
lastError); } else { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index c5d5dbec1ef15..42121147d7f09 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.search.lookup.Source; @@ -186,7 +186,7 @@ private IndexMode getIndexMode() { public static final TypeParser PARSER = new ConfigurableTypeParser( c -> c.getIndexSettings().getMode() == IndexMode.TIME_SERIES - ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersion.V_8_7_0) ? TSDB_DEFAULT : TSDB_LEGACY_DEFAULT + ? c.getIndexSettings().getIndexVersionCreated().onOrAfter(IndexVersions.V_8_7_0) ? TSDB_DEFAULT : TSDB_LEGACY_DEFAULT : DEFAULT, c -> new Builder(c.getIndexSettings().getMode()) ); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 70f4d2d901b45..91616041f65f6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -54,6 +54,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -411,7 +412,7 @@ private SubFieldInfo buildPrefixInfo(MapperBuilderContext context, FieldType fie * or a multi-field). This way search will continue to work on old indices and new indices * will use the expected full name. */ - String fullName = indexCreatedVersion.before(IndexVersion.V_7_2_1) ? name() : context.buildFullName(name); + String fullName = indexCreatedVersion.before(IndexVersions.V_7_2_1) ? name() : context.buildFullName(name); // Copy the index options of the main field to allow phrase queries on // the prefix field. FieldType pft = new FieldType(fieldType); @@ -935,6 +936,30 @@ public boolean isAggregatable() { return fielddata; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (syntheticSourceDelegate != null) { + return syntheticSourceDelegate.blockLoader(blContext); + } + if (isSyntheticSource) { + if (isStored()) { + return BlockStoredFieldsReader.bytesRefsFromStrings(name()); + } + /* + * We *shouldn't fall to this exception. The mapping should be + * rejected because we've enabled synthetic source but not configured + * the index properly. But we give it a nice message anyway just in + * case. 
+ */ + throw new IllegalArgumentException( + "fetching values from a text field [" + + name() + + "] is not supported because synthetic _source is enabled and we don't have a way to load the fields" + ); + } + return BlockSourceReader.bytesRefs(SourceValueFetcher.toString(blContext.sourcePaths(name()))); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java index e8e4b6909fc4e..9d43ef398feac 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TsidExtractingIdFieldMapper.java @@ -87,6 +87,11 @@ public Query termsQuery(Collection values, SearchExecutionContext context) { return new TermInSetQuery(name(), bytesRefs); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return BlockStoredFieldsReader.id(); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { throw new IllegalArgumentException("Fielddata is not supported on [_id] field in [time_series] indices"); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 40c96b9976317..84c2e11b1beaa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.similarity.SimilarityProvider; import java.util.ArrayList; @@ -97,7 +97,7 @@ public static boolean parseMultiField( if (parserContext.isWithinMultiField()) { // For indices created prior to 8.0, we only emit a deprecation warning and do not fail type parsing. This is to // maintain the backwards-compatibility guarantee that we can always load indexes from the previous major version.
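The TextFieldMapper.blockLoader hunk just above encodes a strict precedence for where text values can be read from. Restated linearly, a sketch using only names from the diff (the flattened shape and abbreviated exception message are mine):

// 1. A keyword sub-field usable as a synthetic-source delegate answers for the text field.
// 2. Synthetic _source with the field stored: read the stored values directly.
// 3. Synthetic _source without stored values: nothing to read from, so reject.
// 4. Classic stored _source: re-parse the value out of _source.
BlockLoader loader;
if (syntheticSourceDelegate != null) {
    loader = syntheticSourceDelegate.blockLoader(blContext);
} else if (isSyntheticSource && isStored()) {
    loader = BlockStoredFieldsReader.bytesRefsFromStrings(name());
} else if (isSyntheticSource) {
    throw new IllegalArgumentException("no way to load text field [" + name() + "]");
} else {
    loader = BlockSourceReader.bytesRefs(SourceValueFetcher.toString(blContext.sourcePaths(name())));
}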
- if (parserContext.indexVersionCreated().before(IndexVersion.V_8_0_0)) { + if (parserContext.indexVersionCreated().before(IndexVersions.V_8_0_0)) { deprecationLogger.warn( DeprecationCategory.INDICES, "multifield_within_multifield", diff --git a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index 0de2a27fbaac2..54a44dd55caa4 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -54,6 +54,11 @@ public ValueFetcher valueFetcher(SearchExecutionContext context, String format) return new DocValueFetcher(docValueFormat(format, null), context.getForField(this, FielddataOperation.SEARCH)); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + return BlockDocValuesReader.longs(name()); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index d22d106b4d368..6aaea1dd32285 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -36,6 +36,7 @@ import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.ArraySourceValueFetcher; @@ -76,9 +77,9 @@ */ public class DenseVectorFieldMapper extends FieldMapper { - public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersion.V_7_5_0; - public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersion.FIRST_DETACHED_INDEX_VERSION; - public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersion.V_8_9_0; + public static final IndexVersion MAGNITUDE_STORED_INDEX_VERSION = IndexVersions.V_7_5_0; + public static final IndexVersion INDEXED_BY_DEFAULT_INDEX_VERSION = IndexVersions.FIRST_DETACHED_INDEX_VERSION; + public static final IndexVersion LITTLE_ENDIAN_FLOAT_STORED_INDEX_VERSION = IndexVersions.V_8_9_0; public static final String CONTENT_TYPE = "dense_vector"; public static short MAX_DIMS_COUNT = 4096; // maximum allowed number of dimensions diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java index f10055fd4669b..3b892fc1647b6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapper.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -44,10 +45,10 @@ public 
class SparseVectorFieldMapper extends FieldMapper { static final String ERROR_MESSAGE_7X = "[sparse_vector] field type in old 7.x indices is allowed to " + "contain [sparse_vector] fields, but they cannot be indexed or searched."; static final String ERROR_MESSAGE_8X = "The [sparse_vector] field type is not supported from 8.0 to 8.10 versions."; - static final IndexVersion PREVIOUS_SPARSE_VECTOR_INDEX_VERSION = IndexVersion.V_8_0_0; + static final IndexVersion PREVIOUS_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.V_8_0_0; - static final IndexVersion NEW_SPARSE_VECTOR_INDEX_VERSION = IndexVersion.NEW_SPARSE_VECTOR; - static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_INDEX_VERSION = IndexVersion.SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT; + static final IndexVersion NEW_SPARSE_VECTOR_INDEX_VERSION = IndexVersions.NEW_SPARSE_VECTOR; + static final IndexVersion SPARSE_VECTOR_IN_FIELD_NAMES_INDEX_VERSION = IndexVersions.SPARSE_VECTOR_IN_FIELD_NAMES_SUPPORT; public static class Builder extends FieldMapper.Builder { diff --git a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java index 29537b4ddbdb9..2e39b13b34c78 100644 --- a/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java +++ b/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java @@ -23,7 +23,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.gateway.WriteStateException; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.SafeCommitInfo; import org.elasticsearch.index.shard.AbstractIndexShardComponent; import org.elasticsearch.index.shard.IndexShard; @@ -974,15 +974,15 @@ public ReplicationTracker( this.pendingInSync = new HashSet<>(); this.routingTable = null; this.replicationGroup = null; - this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_6_0) + this.hasAllPeerRecoveryRetentionLeases = indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_6_0) || (indexSettings.isSoftDeleteEnabled() - && indexSettings.getIndexVersionCreated().onOrAfter(IndexVersion.V_7_4_0) + && indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.V_7_4_0) && indexSettings.getIndexMetadata().getState() == IndexMetadata.State.OPEN); this.fileBasedRecoveryThreshold = IndexSettings.FILE_BASED_RECOVERY_THRESHOLD_SETTING.get(indexSettings.getSettings()); this.safeCommitInfoSupplier = safeCommitInfoSupplier; this.onReplicationGroupUpdated = onReplicationGroupUpdated; - assert IndexVersion.ZERO.equals(indexSettings.getIndexVersionCreated()) == false; + assert IndexVersions.ZERO.equals(indexSettings.getIndexVersionCreated()) == false; assert invariant(); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index ba8fd01ae028e..f4812f280f917 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -72,6 +72,7 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.bulk.stats.BulkOperationListener; import 
org.elasticsearch.index.bulk.stats.BulkStats; @@ -3218,7 +3219,7 @@ public RetentionLease addPeerRecoveryRetentionLease( ) { assert assertPrimaryMode(); // only needed for BWC reasons involving rolling upgrades from versions that do not support PRRLs: - assert indexSettings.getIndexVersionCreated().before(IndexVersion.V_7_4_0) || indexSettings.isSoftDeleteEnabled() == false; + assert indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_4_0) || indexSettings.isSoftDeleteEnabled() == false; return replicationTracker.addPeerRecoveryRetentionLease(nodeId, globalCheckpoint, listener); } @@ -4172,7 +4173,18 @@ public String toString() { return "IndexShard(shardRouting=" + shardRouting + ")"; } + /** + * @deprecated use {@link #waitForPrimaryTermAndGeneration(long, long, ActionListener)} instead. + */ + @Deprecated public void waitForSegmentGeneration(long segmentGeneration, ActionListener listener) { - getEngine().addSegmentGenerationListener(segmentGeneration, listener); + waitForPrimaryTermAndGeneration(getOperationPrimaryTerm(), segmentGeneration, listener); + } + + /** + * Registers a listener for an event when the shard advances to the provided primary term and segment generation + */ + public void waitForPrimaryTermAndGeneration(long primaryTerm, long segmentGeneration, ActionListener listener) { + getEngine().addPrimaryTermAndGenerationListener(primaryTerm, segmentGeneration, listener); } } diff --git a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java index 72f958a5fd9ca..ded3ffa4ebcc0 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/StoreRecovery.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -552,6 +553,8 @@ private void restore( if (indexId.getId().equals(IndexMetadata.INDEX_UUID_NA_VALUE)) { // BwC path, running against an old version master that did not add the IndexId to the recovery source repository.getRepositoryData( + // TODO no need to fork back to GENERIC if using cached repo data, see #101445 + EsExecutors.DIRECT_EXECUTOR_SERVICE, new ThreadedActionListener<>( indexShard.getThreadPool().generic(), indexIdListener.map(repositoryData -> repositoryData.resolveIndexId(indexId.getName())) diff --git a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java index cc701eef7d063..76764f589c09b 100644 --- a/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java +++ b/server/src/main/java/org/elasticsearch/index/similarity/SimilarityProviders.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import java.util.Arrays; @@ -100,7 +101,7 @@ private static BasicModel parseBasicModel(IndexVersion indexCreatedVersion, Sett if (model == null) { String replacement = LEGACY_BASIC_MODELS.get(basicModel); if 
(replacement != null) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_7_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { throw new IllegalArgumentException( "Basic model [" + basicModel + "] isn't supported anymore, " + "please use another model." ); @@ -139,7 +140,7 @@ private static AfterEffect parseAfterEffect(IndexVersion indexCreatedVersion, Se if (effect == null) { String replacement = LEGACY_AFTER_EFFECTS.get(afterEffect); if (replacement != null) { - if (indexCreatedVersion.onOrAfter(IndexVersion.V_7_0_0)) { + if (indexCreatedVersion.onOrAfter(IndexVersions.V_7_0_0)) { throw new IllegalArgumentException( "After effect [" + afterEffect + "] isn't supported anymore, please use another effect." ); @@ -239,7 +240,7 @@ static void assertSettingsIsSubsetOf(String type, IndexVersion version, Settings unknownSettings.removeAll(Arrays.asList(supportedSettings)); unknownSettings.remove("type"); // used to figure out which sim this is if (unknownSettings.isEmpty() == false) { - if (version.onOrAfter(IndexVersion.V_7_0_0)) { + if (version.onOrAfter(IndexVersions.V_7_0_0)) { throw new IllegalArgumentException("Unknown settings for similarity of type [" + type + "]: " + unknownSettings); } else { deprecationLogger.warn( diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 015fbf06e042c..b7bf3a68ade07 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -59,6 +59,7 @@ import org.elasticsearch.env.ShardLockObtainFailedException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.CombinedDeletionPolicy; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SequenceNumbers; @@ -830,7 +831,7 @@ static MetadataSnapshot loadFromIndexCommit(IndexCommit commit, Directory direct } } if (maxVersion == null) { - maxVersion = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion(); + maxVersion = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion(); } final String segmentsFile = segmentCommitInfos.getSegmentsFileName(); checksumFromLuceneFile( diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 3ba85b9ce4c69..902e080c42328 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -19,6 +19,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; @@ -136,7 +137,7 @@ private static NamedRegistry> setupTokenFil tokenFilters.register("standard", new AnalysisProvider() { @Override public TokenFilterFactory get(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - if (indexSettings.getIndexVersionCreated().before(IndexVersion.V_7_0_0)) { + if (indexSettings.getIndexVersionCreated().before(IndexVersions.V_7_0_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "standard_deprecation", @@ 
-201,7 +202,7 @@ static Map setupPreConfiguredTokenFilters(List // This was originally removed in 7_0_0 but due to a caching bug it was still possible // in certain circumstances to create a new index referencing the standard token filter // until version 7_5_2 - if (version.before(IndexVersion.V_7_6_0)) { + if (version.before(IndexVersions.V_7_6_0)) { deprecationLogger.warn( DeprecationCategory.ANALYSIS, "standard_deprecation", diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index 2cdd383114497..a570c88ddaba7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -336,8 +336,18 @@ public void onResponse(ActionResponse.Empty ignored) { @Override public void onFailure(Exception e) { + final var cause = ExceptionsHelper.unwrapCause(e); + final var sendShardFailure = + // these indicate the source shard has already failed, which will independently notify the master and fail + // the target shard + false == (cause instanceof ShardNotFoundException || cause instanceof IndexNotFoundException); + // TODO retries? See RecoveryResponseHandler#handleException - onGoingRecoveries.failRecovery(recoveryId, new RecoveryFailedException(recoveryState, null, e), true); + onGoingRecoveries.failRecovery( + recoveryId, + new RecoveryFailedException(recoveryState, null, e), + sendShardFailure + ); } } ); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java index 287521ae60f32..47405e0daa0a7 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySettings.java @@ -29,6 +29,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.monitor.os.OsProbe; import org.elasticsearch.node.NodeRoleSettings; @@ -47,9 +48,9 @@ public class RecoverySettings { public static final Version SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = Version.V_7_15_0; - public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersion.V_7_15_0; + public static final IndexVersion SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION = IndexVersions.V_7_15_0; public static final TransportVersion SNAPSHOT_RECOVERIES_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_15_0; - public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersion.V_7_16_0; + public static final IndexVersion SEQ_NO_SNAPSHOT_RECOVERIES_SUPPORTED_VERSION = IndexVersions.V_7_16_0; public static final TransportVersion SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_TRANSPORT_VERSION = TransportVersions.V_7_16_0; private static final Logger logger = LogManager.getLogger(RecoverySettings.class); diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 81bc226102f62..84385ee04c2dd 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -38,7 +38,7
@@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.RecoveryEngineException; import org.elasticsearch.index.seqno.ReplicationTracker; @@ -976,7 +976,7 @@ void createRetentionLease(final long startingSeqNo, ActionListener Optional getSinglePlugin(Class pluginClass) { + return getSinglePlugin(pluginsService.filterPlugins(pluginClass).stream(), pluginClass); + } + + private Optional getSinglePlugin(Stream plugins, Class pluginClass) { + var it = plugins.iterator(); + if (it.hasNext() == false) { + return Optional.empty(); + } + T plugin = it.next(); + if (it.hasNext()) { + List allPlugins = new ArrayList<>(); + allPlugins.add(plugin); + it.forEachRemaining(allPlugins::add); + throw new IllegalStateException("A single " + pluginClass.getName() + " was expected but got: " + allPlugins); + } + return Optional.of(plugin); + } + private void construct(Environment initialEnvironment, NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings) throws IOException { // Pass the node settings to the DeprecationLogger class so that it can have the deprecation.skip_deprecated_settings setting: DeprecationLogger.initialize(initialEnvironment.settings()); - Settings tmpSettings = Settings.builder() - .put(initialEnvironment.settings()) - .put(Client.CLIENT_TYPE_SETTING_S.getKey(), "node") - .build(); + Settings environmentSettings = initialEnvironment.settings(); final JvmInfo jvmInfo = JvmInfo.jvmInfo(); logger.info( @@ -356,7 +374,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi Build.current().qualifiedVersion() ); } - if (Environment.PATH_SHARED_DATA_SETTING.exists(tmpSettings)) { + if (Environment.PATH_SHARED_DATA_SETTING.exists(environmentSettings)) { // NOTE: this must be done with an explicit check here because the deprecation property on a path setting will // cause ES to fail to start since logging is not yet initialized on first read of the setting deprecationLogger.warn( @@ -375,7 +393,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi + "multiple disks. This feature will be removed in a future release." ); } - if (Environment.dataPathUsesList(tmpSettings)) { + if (Environment.dataPathUsesList(environmentSettings)) { // already checked for multiple values above, so if this is a list it is a single valued list deprecationLogger.warn( DeprecationCategory.SETTINGS, @@ -399,15 +417,15 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi (e, apmConfig) -> logger.error("failed to delete temporary APM config file [{}], reason: [{}]", apmConfig, e.getMessage()) ); - this.pluginsService = serviceProvider.pluginsServiceCtor(initialEnvironment).apply(tmpSettings); - final Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), tmpSettings); + pluginsService = serviceProvider.newPluginService(initialEnvironment, environmentSettings); + final Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), environmentSettings); /* * Create the environment based on the finalized view of the settings. This is to ensure that components get the same setting * values, no matter where they ask for them from.
*/ - this.environment = new Environment(settings, initialEnvironment.configFile()); - Environment.assertEquivalent(initialEnvironment, this.environment); + environment = new Environment(settings, initialEnvironment.configFile()); + Environment.assertEquivalent(initialEnvironment, environment); final List> executorBuilders = pluginsService.flatMap(p -> p.getExecutorBuilders(settings)).toList(); @@ -424,7 +442,9 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi Task.HEADERS_TO_COPY.stream() ).collect(Collectors.toSet()); - final TelemetryProvider telemetryProvider = getTelemetryProvider(pluginsService, settings); + final TelemetryProvider telemetryProvider = getSinglePlugin(TelemetryPlugin.class).map(p -> p.getTelemetryProvider(settings)) + .orElse(TelemetryProvider.NOOP); + final Tracer tracer = telemetryProvider.getTracer(); final TaskManager taskManager = new TaskManager(settings, threadPool, taskHeaders, tracer); @@ -446,7 +466,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi threadPool::absoluteTimeInMillis ); AnalysisModule analysisModule = new AnalysisModule( - this.environment, + environment, pluginsService.filterPlugins(AnalysisPlugin.class), pluginsService.getStablePluginRegistry() ); @@ -461,12 +481,12 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi // creating `NodeEnvironment` breaks the ability to rollback to 7.x on an 8.0 upgrade (`upgradeLegacyNodeFolders`) so do this // after settings validation. - nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + nodeEnvironment = new NodeEnvironment(environmentSettings, environment); logger.info( "node name [{}], node ID [{}], cluster name [{}], roles {}", - Node.NODE_NAME_SETTING.get(tmpSettings), + Node.NODE_NAME_SETTING.get(environmentSettings), nodeEnvironment.nodeId(), - ClusterName.CLUSTER_NAME_SETTING.get(tmpSettings).value(), + ClusterName.CLUSTER_NAME_SETTING.get(environmentSettings).value(), DiscoveryNode.getRolesFromSettings(settings) .stream() .map(DiscoveryNodeRole::roleName) @@ -477,7 +497,11 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi ScriptModule.registerClusterSettingsListeners(scriptService, settingsModule.getClusterSettings()); final NetworkService networkService = new NetworkService( - getCustomNameResolvers(pluginsService.filterPlugins(DiscoveryPlugin.class)) + pluginsService.filterPlugins(DiscoveryPlugin.class) + .stream() + .map(d -> d.getCustomNameResolver(environment.settings())) + .filter(Objects::nonNull) + .toList() ); List clusterPlugins = pluginsService.filterPlugins(ClusterPlugin.class); @@ -503,12 +527,12 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi final IngestService ingestService = new IngestService( clusterService, threadPool, - this.environment, + environment, scriptService, analysisModule.getAnalysisRegistry(), pluginsService.filterPlugins(IngestPlugin.class), client, - IngestService.createGrokThreadWatchdog(this.environment, threadPool), + IngestService.createGrokThreadWatchdog(environment, threadPool), documentParsingObserverSupplier ); final SetOnce repositoriesServiceReference = new SetOnce<>(); @@ -523,16 +547,17 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi SearchModule searchModule = new SearchModule(settings, pluginsService.filterPlugins(SearchPlugin.class)); IndexSearcher.setMaxClauseCount(SearchUtils.calculateMaxClauseValue(threadPool)); - List 
namedWriteables = Stream.of( - NetworkModule.getNamedWriteables().stream(), - IndicesModule.getNamedWriteables().stream(), - searchModule.getNamedWriteables().stream(), - pluginsService.flatMap(Plugin::getNamedWriteables), - ClusterModule.getNamedWriteables().stream(), - SystemIndexMigrationExecutor.getNamedWriteables().stream(), - inferenceServiceRegistry.getNamedWriteables().stream() - ).flatMap(Function.identity()).toList(); - final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry( + Stream.of( + NetworkModule.getNamedWriteables().stream(), + IndicesModule.getNamedWriteables().stream(), + searchModule.getNamedWriteables().stream(), + pluginsService.flatMap(Plugin::getNamedWriteables), + ClusterModule.getNamedWriteables().stream(), + SystemIndexMigrationExecutor.getNamedWriteables().stream(), + inferenceServiceRegistry.getNamedWriteables().stream() + ).flatMap(Function.identity()).toList() + ); NamedXContentRegistry xContentRegistry = new NamedXContentRegistry( Stream.of( NetworkModule.getNamedXContents().stream(), @@ -542,7 +567,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi ClusterModule.getNamedXWriteables().stream(), SystemIndexMigrationExecutor.getNamedXContentParsers().stream(), HealthNodeTaskExecutor.getNamedXContentParsers().stream() - ).flatMap(Function.identity()).collect(toList()) + ).flatMap(Function.identity()).toList() ); final List features = pluginsService.filterPlugins(SystemIndexPlugin.class).stream().map(plugin -> { SystemIndices.validateFeatureName(plugin.getFeatureName(), plugin.getClass().getCanonicalName()); @@ -615,7 +640,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi // collect engine factory providers from plugins final Collection enginePlugins = pluginsService.filterPlugins(EnginePlugin.class); final Collection>> engineFactoryProviders = enginePlugins.stream() - .map(plugin -> (Function>) plugin::getEngineFactory) + .>>map(plugin -> plugin::getEngineFactory) .toList(); final Map indexStoreFactories = pluginsService.filterPlugins(IndexStorePlugin.class) @@ -717,24 +742,46 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi threadPool ); - Collection pluginComponents = pluginsService.flatMap( - p -> p.createComponents( - client, - clusterService, - threadPool, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment, - namedWriteableRegistry, - clusterModule.getIndexNameExpressionResolver(), - repositoriesServiceReference::get, - telemetryProvider, - clusterModule.getAllocationService(), - indicesService - ) - ).toList(); + FeatureService featureService = new FeatureService(pluginsService.loadServiceProviders(FeatureSpecification.class)); + + record PluginServiceInstances( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier, + TelemetryProvider telemetryProvider, + AllocationService allocationService, + IndicesService indicesService, + FeatureService featureService, + SystemIndices systemIndices + ) implements Plugin.PluginServices {} + 
PluginServiceInstances pluginServices = new PluginServiceInstances( + client, + clusterService, + threadPool, + resourceWatcherService, + scriptService, + xContentRegistry, + environment, + nodeEnvironment, + namedWriteableRegistry, + clusterModule.getIndexNameExpressionResolver(), + repositoriesServiceReference::get, + telemetryProvider, + clusterModule.getAllocationService(), + indicesService, + featureService, + systemIndices + ); + + Collection pluginComponents = pluginsService.flatMap(p -> p.createComponents(pluginServices)).toList(); List> reservedStateHandlers = new ArrayList<>(); @@ -754,26 +801,13 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi reservedStateHandlers.add(new ReservedComposableIndexTemplateAction(templateService, settingsModule.getIndexScopedSettings())); // add all reserved state handlers from plugins - List pluginHandlers = pluginsService.loadServiceProviders( - ReservedClusterStateHandlerProvider.class - ); - pluginHandlers.forEach(h -> reservedStateHandlers.addAll(h.handlers())); + pluginsService.loadServiceProviders(ReservedClusterStateHandlerProvider.class) + .forEach(h -> reservedStateHandlers.addAll(h.handlers())); - List terminationHandlers = pluginsService.loadServiceProviders(TerminationHandlerProvider.class) + var terminationHandlers = pluginsService.loadServiceProviders(TerminationHandlerProvider.class) .stream() - .map(prov -> prov.handler()) - .toList(); - if (terminationHandlers.size() == 1) { - this.terminationHandler = terminationHandlers.get(0); - } else if (terminationHandlers.size() > 1) { - throw new IllegalStateException( - Strings.format( - "expected at most one termination handler, but found %s: [%s]", - terminationHandlers.size(), - terminationHandlers.stream().map(it -> it.getClass().getCanonicalName()) - ) - ); - } + .map(TerminationHandlerProvider::handler); + terminationHandler = getSinglePlugin(terminationHandlers, TerminationHandler.class).orElse(null); ActionModule actionModule = new ActionModule( settings, @@ -824,8 +858,8 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi ); if (DiscoveryNode.isMasterNode(settings)) { clusterService.addListener(new SystemIndexMetadataUpgradeService(systemIndices, clusterService)); + clusterService.addListener(new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetadataUpgraders)); } - new TemplateUpgradeService(client, clusterService, threadPool, indexTemplateMetadataUpgraders); final Transport transport = networkModule.getTransportSupplier().get(); final TransportService transportService = serviceProvider.newTransportService( pluginsService, @@ -850,7 +884,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi final RecoverySettings recoverySettings = new RecoverySettings(settings, settingsModule.getClusterSettings()); RepositoriesModule repositoriesModule = new RepositoriesModule( - this.environment, + environment, pluginsService.filterPlugins(RepositoryPlugin.class), transportService, clusterService, @@ -926,7 +960,8 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi rerouteService, fsHealthService, circuitBreakerService, - compatibilityVersions + compatibilityVersions, + featureService.getNodeFeatures() ); this.nodeService = new NodeService( settings, @@ -978,8 +1013,8 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi settings, clusterService.getClusterSettings() ); - final List> builtinTaskExecutors = 
List.of(systemIndexMigrationExecutor, healthNodeTaskExecutor); - final List> pluginTaskExecutors = pluginsService.filterPlugins(PersistentTaskPlugin.class) + final Stream> builtinTaskExecutors = Stream.of(systemIndexMigrationExecutor, healthNodeTaskExecutor); + final Stream> pluginTaskExecutors = pluginsService.filterPlugins(PersistentTaskPlugin.class) .stream() .map( p -> p.getPersistentTasksExecutor( @@ -990,10 +1025,9 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi clusterModule.getIndexNameExpressionResolver() ) ) - .flatMap(List::stream) - .collect(toList()); + .flatMap(List::stream); final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry( - concatLists(pluginTaskExecutors, builtinTaskExecutors) + Stream.concat(pluginTaskExecutors, builtinTaskExecutors).toList() ); final PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService( settings, @@ -1003,8 +1037,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi ); resourcesToClose.add(persistentTasksClusterService); - final List shutdownAwarePlugins = pluginsService.filterPlugins(ShutdownAwarePlugin.class); - final PluginShutdownService pluginShutdownService = new PluginShutdownService(shutdownAwarePlugins); + PluginShutdownService pluginShutdownService = new PluginShutdownService(pluginsService.filterPlugins(ShutdownAwarePlugin.class)); clusterService.addListener(pluginShutdownService); final RecoveryPlannerService recoveryPlannerService = getRecoveryPlannerService(threadPool, clusterService, repositoryService); @@ -1017,13 +1050,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi discoveryModule.getCoordinator(), masterHistoryService ); - final HealthService healthService = createHealthService( - clusterService, - clusterModule, - coordinationDiagnosticsService, - threadPool, - systemIndices - ); + final HealthService healthService = createHealthService(clusterService, coordinationDiagnosticsService, threadPool); HealthPeriodicLogger healthPeriodicLogger = createHealthPeriodicLogger(clusterService, settings, client, healthService); healthPeriodicLogger.init(); HealthMetadataService healthMetadataService = HealthMetadataService.create(clusterService, settings); @@ -1040,7 +1067,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi b.bind(PluginsService.class).toInstance(pluginsService); b.bind(Client.class).toInstance(client); b.bind(NodeClient.class).toInstance(client); - b.bind(Environment.class).toInstance(this.environment); + b.bind(Environment.class).toInstance(environment); b.bind(ThreadPool.class).toInstance(threadPool); b.bind(NodeEnvironment.class).toInstance(nodeEnvironment); b.bind(ResourceWatcherService.class).toInstance(resourceWatcherService); @@ -1073,6 +1100,7 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi b.bind(ClusterInfoService.class).toInstance(clusterInfoService); b.bind(SnapshotsInfoService.class).toInstance(snapshotsInfoService); b.bind(GatewayMetaState.class).toInstance(gatewayMetaState); + b.bind(FeatureService.class).toInstance(featureService); b.bind(Coordinator.class).toInstance(discoveryModule.getCoordinator()); b.bind(Reconfigurator.class).toInstance(discoveryModule.getReconfigurator()); { @@ -1194,13 +1222,8 @@ private void construct(Environment initialEnvironment, NodeServiceProvider servi } private Supplier getDocumentParsingObserverSupplier() { - List plugins = 
pluginsService.filterPlugins(DocumentParsingObserverPlugin.class); - if (plugins.size() == 1) { - return plugins.get(0).getDocumentParsingObserverSupplier(); - } else if (plugins.size() == 0) { - return () -> DocumentParsingObserver.EMPTY_INSTANCE; - } - throw new IllegalStateException("too many DocumentParsingObserverPlugin instances"); + return getSinglePlugin(DocumentParsingObserverPlugin.class).map(DocumentParsingObserverPlugin::getDocumentParsingObserverSupplier) + .orElse(() -> DocumentParsingObserver.EMPTY_INSTANCE); } /** @@ -1214,13 +1237,11 @@ private static CircuitBreakerService createCircuitBreakerService( ClusterSettings clusterSettings ) { String type = Node.BREAKER_TYPE_KEY.get(settings); - if (type.equals("hierarchy")) { - return new HierarchyCircuitBreakerService(settings, breakerSettings, clusterSettings); - } else if (type.equals("none")) { - return new NoneCircuitBreakerService(); - } else { - throw new IllegalArgumentException("Unknown circuit breaker type [" + type + "]"); - } + return switch (type) { + case "hierarchy" -> new HierarchyCircuitBreakerService(settings, breakerSettings, clusterSettings); + case "none" -> new NoneCircuitBreakerService(); + default -> throw new IllegalArgumentException("Unknown circuit breaker type [" + type + "]"); + }; } /** @@ -1234,41 +1255,27 @@ private static ReloadablePlugin wrapPlugins(List reloadablePlu try { plugin.reload(settings); } catch (IOException e) { - throw new RuntimeException(e); + throw new UncheckedIOException(e); } } }; } - private static TelemetryProvider getTelemetryProvider(PluginsService pluginsService, Settings settings) { - final List telemetryPlugins = pluginsService.filterPlugins(TelemetryPlugin.class); - - if (telemetryPlugins.size() > 1) { - throw new IllegalStateException("A single TelemetryPlugin was expected but got: " + telemetryPlugins); - } - - return telemetryPlugins.isEmpty() ? 
TelemetryProvider.NOOP : telemetryPlugins.get(0).getTelemetryProvider(settings); - } - private HealthService createHealthService( ClusterService clusterService, - ClusterModule clusterModule, CoordinationDiagnosticsService coordinationDiagnosticsService, - ThreadPool threadPool, - SystemIndices systemIndices + ThreadPool threadPool ) { - var serverHealthIndicatorServices = List.of( + var serverHealthIndicatorServices = Stream.of( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService), - new ShardsAvailabilityHealthIndicatorService(clusterService, clusterModule.getAllocationService(), systemIndices), new DiskHealthIndicatorService(clusterService), new ShardsCapacityHealthIndicatorService(clusterService) ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .stream() - .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) - .toList(); - return new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices), threadPool); + .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); + return new HealthService(Stream.concat(serverHealthIndicatorServices, pluginHealthIndicatorServices).toList(), threadPool); } private static HealthPeriodicLogger createHealthPeriodicLogger( @@ -1285,39 +1292,23 @@ private RecoveryPlannerService getRecoveryPlannerService( ClusterService clusterService, RepositoriesService repositoryService ) { - final List recoveryPlannerServices = pluginsService.filterPlugins(RecoveryPlannerPlugin.class) + var recoveryPlannerServices = pluginsService.filterPlugins(RecoveryPlannerPlugin.class) .stream() .map( plugin -> plugin.createRecoveryPlannerService( new ShardSnapshotsService(client, repositoryService, threadPool, clusterService) ) ) - .filter(Optional::isPresent) - .map(Optional::get) - .toList(); - if (recoveryPlannerServices.isEmpty()) { - return new PeerOnlyRecoveryPlannerService(); - } else if (recoveryPlannerServices.size() > 1) { - throw new IllegalStateException("Expected a single RecoveryPlannerService but got: " + recoveryPlannerServices.size()); - } - return recoveryPlannerServices.get(0); + .flatMap(Optional::stream); + return getSinglePlugin(recoveryPlannerServices, RecoveryPlannerService.class).orElseGet(PeerOnlyRecoveryPlannerService::new); } private WriteLoadForecaster getWriteLoadForecaster(ThreadPool threadPool, Settings settings, ClusterSettings clusterSettings) { - final List clusterPlugins = pluginsService.filterPlugins(ClusterPlugin.class); - final List writeLoadForecasters = clusterPlugins.stream() - .flatMap(clusterPlugin -> clusterPlugin.createWriteLoadForecasters(threadPool, settings, clusterSettings).stream()) - .toList(); - - if (writeLoadForecasters.isEmpty()) { - return WriteLoadForecaster.DEFAULT; - } - - if (writeLoadForecasters.size() > 1) { - throw new IllegalStateException("A single WriteLoadForecaster was expected but got: " + writeLoadForecasters); - } + var writeLoadForecasters = pluginsService.filterPlugins(ClusterPlugin.class) + .stream() + .flatMap(clusterPlugin -> clusterPlugin.createWriteLoadForecasters(threadPool, settings, clusterSettings).stream()); - return writeLoadForecasters.get(0); + return getSinglePlugin(writeLoadForecasters, WriteLoadForecaster.class).orElse(WriteLoadForecaster.DEFAULT); } private PersistedClusterStateService newPersistedClusterStateService( @@ -1326,38 +1317,23 @@ private PersistedClusterStateService newPersistedClusterStateService( 
ThreadPool threadPool, CompatibilityVersions compatibilityVersions ) { - final List persistedClusterStateServiceFactories = pluginsService - .filterPlugins(ClusterCoordinationPlugin.class) + var persistedClusterStateServiceFactories = pluginsService.filterPlugins(ClusterCoordinationPlugin.class) .stream() .map(ClusterCoordinationPlugin::getPersistedClusterStateServiceFactory) - .flatMap(Optional::stream) - .toList(); + .flatMap(Optional::stream); - if (persistedClusterStateServiceFactories.size() > 1) { - throw new IllegalStateException("multiple persisted-state-service factories found: " + persistedClusterStateServiceFactories); - } - - if (persistedClusterStateServiceFactories.size() == 1) { - return persistedClusterStateServiceFactories.get(0) - .newPersistedClusterStateService(nodeEnvironment, xContentRegistry, clusterSettings, threadPool, compatibilityVersions); - } - - return new PersistedClusterStateService(nodeEnvironment, xContentRegistry, clusterSettings, threadPool::relativeTimeInMillis); - } - - /** - * Get Custom Name Resolvers list based on a Discovery Plugins list - * - * @param discoveryPlugins Discovery plugins list - */ - private List getCustomNameResolvers(List discoveryPlugins) { - List customNameResolvers = new ArrayList<>(); - for (DiscoveryPlugin discoveryPlugin : discoveryPlugins) { - NetworkService.CustomNameResolver customNameResolver = discoveryPlugin.getCustomNameResolver(environment.settings()); - if (customNameResolver != null) { - customNameResolvers.add(customNameResolver); - } - } - return customNameResolvers; + return getSinglePlugin(persistedClusterStateServiceFactories, ClusterCoordinationPlugin.PersistedClusterStateServiceFactory.class) + .map( + f -> f.newPersistedClusterStateService( + nodeEnvironment, + xContentRegistry, + clusterSettings, + threadPool, + compatibilityVersions + ) + ) + .orElseGet( + () -> new PersistedClusterStateService(nodeEnvironment, xContentRegistry, clusterSettings, threadPool::relativeTimeInMillis) + ); } } diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index e11d6016ad5cd..ab90ca42bca98 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -49,8 +49,9 @@ */ class NodeServiceProvider { - Function pluginsServiceCtor(Environment initialEnvironment) { - return PluginsService.getPluginsServiceCtor(initialEnvironment); + PluginsService newPluginService(Environment environment, Settings settings) { + // this creates a PluginsService with an empty list of classpath plugins + return new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index bd5d8e9220517..de9f8186865aa 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -21,9 +21,11 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.indices.SystemIndices; import 
org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.script.ScriptService; import org.elasticsearch.telemetry.TelemetryProvider; @@ -62,6 +64,93 @@ */ public abstract class Plugin implements Closeable { + /** + * Provides access to various Elasticsearch services. + */ + public interface PluginServices { + /** + * A client to make requests to the system + */ + Client client(); + + /** + * A service to allow watching and updating cluster state + */ + ClusterService clusterService(); + + /** + * A service to allow retrieving an executor to run an async action + */ + ThreadPool threadPool(); + + /** + * A service to watch for changes to node local files + */ + ResourceWatcherService resourceWatcherService(); + + /** + * A service to allow running scripts on the local node + */ + ScriptService scriptService(); + + /** + * The registry for extensible xContent parsing + */ + NamedXContentRegistry xContentRegistry(); + + /** + * The environment for path and setting configurations + */ + Environment environment(); + + /** + * The node environment used to coordinate access to the data paths + */ + NodeEnvironment nodeEnvironment(); + + /** + * The registry for {@link NamedWriteable} object parsing + */ + NamedWriteableRegistry namedWriteableRegistry(); + + /** + * A service that resolves expressions to index and alias names + */ + IndexNameExpressionResolver indexNameExpressionResolver(); + + /** + * A supplier for the service that manages snapshot repositories. + * This will return null when {@link #createComponents(PluginServices)} is called, + * but will return the repositories service once the node is initialized. + */ + Supplier repositoriesServiceSupplier(); + + /** + * An interface for distributed tracing + */ + TelemetryProvider telemetryProvider(); + + /** + * A service to manage shard allocation in the cluster + */ + AllocationService allocationService(); + + /** + * A service to manage indices in the cluster + */ + IndicesService indicesService(); + + /** + * A service to access features supported by nodes in the cluster + */ + FeatureService featureService(); + + /** + * The system indices for the cluster + */ + SystemIndices systemIndices(); + } + /** * Returns components added by this plugin. *

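For illustration, a plugin written against the new single-argument callback might look like the following sketch. MyPlugin and MyComponent are hypothetical names, not part of this change; the accessor methods come from the PluginServices interface above.

    public class MyPlugin extends Plugin {
        @Override
        public Collection<?> createComponents(PluginServices services) {
            // Pull out only the services this plugin actually needs, instead of
            // accepting the full parameter list of the old fourteen-argument method.
            var component = new MyComponent(services.clusterService(), services.threadPool());
            return List.of(component);
        }
    }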
    @@ -69,38 +158,9 @@ public abstract class Plugin implements Closeable { * Note: To aid in the migration away from guice, all objects returned as components will be bound in guice * to themselves. * - * @param client A client to make requests to the system - * @param clusterService A service to allow watching and updating cluster state - * @param threadPool A service to allow retrieving an executor to run an async action - * @param resourceWatcherService A service to watch for changes to node local files - * @param scriptService A service to allow running scripts on the local node - * @param xContentRegistry the registry for extensible xContent parsing - * @param environment the environment for path and setting configurations - * @param nodeEnvironment the node environment used coordinate access to the data paths - * @param namedWriteableRegistry the registry for {@link NamedWriteable} object parsing - * @param indexNameExpressionResolver A service that resolves expression to index and alias names - * @param repositoriesServiceSupplier A supplier for the service that manages snapshot repositories; will return null when this method - * is called, but will return the repositories service once the node is initialized. - * @param telemetryProvider An interface for distributed tracing - * @param allocationService A service to manage shard allocation in the cluster - * @param indicesService A service to manage indices in the cluster + * @param services Provides access to various Elasticsearch services */ - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { return Collections.emptyList(); } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index e55e5d96aa532..3b838db463a4f 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -25,7 +25,6 @@ import org.elasticsearch.core.PathUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; -import org.elasticsearch.env.Environment; import org.elasticsearch.jdk.JarHell; import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.node.ReportingService; @@ -362,7 +361,7 @@ public T loadSingletonServiceProvider(Class service, Supplier fallback var services = loadServiceProviders(service); if (services.size() > 1) { throw new IllegalStateException(String.format(Locale.ROOT, "More than one extension found for %s", service.getSimpleName())); - } else if (services.size() == 0) { + } else if (services.isEmpty()) { return fallback.get(); } return services.get(0); @@ -715,16 +714,6 @@ public final List filterPlugins(Class type) { return plugins().stream().filter(x -> type.isAssignableFrom(x.instance().getClass())).map(p -> ((T) p.instance())).toList(); } - /** - * Get a function that will take a {@link Settings} object and return a {@link 
PluginsService}. - * This function passes in an empty list of classpath plugins. - * @param environment The environment for the plugins service. - * @return A function for creating a plugins service. - */ - public static Function getPluginsServiceCtor(Environment environment) { - return settings -> new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); - } - static final LayerAndLoader createPluginModuleLayer( PluginBundle bundle, ClassLoader parentLoader, diff --git a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java index e0fbb082307ee..b4dd0a2f37b39 100644 --- a/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/FilterRepository.java @@ -27,6 +27,7 @@ import java.io.IOException; import java.util.Collection; import java.util.Set; +import java.util.concurrent.Executor; public class FilterRepository implements Repository { @@ -61,8 +62,8 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } @Override - public void getRepositoryData(ActionListener listener) { - in.getRepositoryData(listener); + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { + in.getRepositoryData(responseExecutor, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/repositories/IndexSnapshotsService.java b/server/src/main/java/org/elasticsearch/repositories/IndexSnapshotsService.java index e274268eb4b29..c89905aa3eb74 100644 --- a/server/src/main/java/org/elasticsearch/repositories/IndexSnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/repositories/IndexSnapshotsService.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.shard.ShardId; @@ -127,7 +128,10 @@ public void getLatestSuccessfulSnapshotForShard( delegate.onResponse(indexShardSnapshotInfo); })); - repository.getRepositoryData(repositoryDataStepListener); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? + repositoryDataStepListener + ); } private Repository getRepository(String repositoryName) { diff --git a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java index 55a77551702da..ad0f956a16643 100644 --- a/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/InvalidRepository.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.concurrent.Executor; /** * Represents a repository that exists in the cluster state but could not be instantiated on a node, typically due to invalid configuration. 
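The getSinglePlugin helper added to Node.java above centralizes a rule that the deleted blocks (telemetry provider, termination handlers, document parsing observers, recovery planners, write-load forecasters, persisted-state factories) each re-implemented: zero matching extensions is fine, exactly one is used, and more than one is a configuration error. A self-contained sketch of that rule, independent of the Elasticsearch classes:

    import java.util.Optional;
    import java.util.stream.Stream;

    public class SingleExtensionDemo {
        // Empty stream -> Optional.empty(); exactly one element -> Optional.of(it);
        // anything more fails fast, mirroring getSinglePlugin above.
        static <T> Optional<T> getSingle(Stream<T> candidates, Class<T> type) {
            var it = candidates.iterator();
            if (it.hasNext() == false) {
                return Optional.empty();
            }
            T first = it.next();
            if (it.hasNext()) {
                throw new IllegalStateException("A single " + type.getName() + " was expected");
            }
            return Optional.of(first);
        }

        public static void main(String[] args) {
            System.out.println(getSingle(Stream.<String>empty(), String.class)); // Optional.empty
            System.out.println(getSingle(Stream.of("only"), String.class)); // Optional[only]
            try {
                getSingle(Stream.of("a", "b"), String.class);
            } catch (IllegalStateException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }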
@@ -68,7 +69,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } @Override - public void getRepositoryData(ActionListener listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { listener.onFailure(createCreationException()); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index bb522cddd22f7..32c32369a5fae 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.fs.FsRepository; @@ -47,7 +48,7 @@ public RepositoriesModule( RecoverySettings recoverySettings, TelemetryProvider telemetryProvider ) { - telemetryProvider.getMeter().registerLongCounter(METRIC_REQUESTS_COUNT, "repository request counter", "unit"); + telemetryProvider.getMeterRegistry().registerLongCounter(METRIC_REQUESTS_COUNT, "repository request counter", "unit"); Map factories = new HashMap<>(); factories.put( FsRepository.TYPE, @@ -104,7 +105,7 @@ public RepositoriesModule( "the snapshot was created with Elasticsearch version [" + version + "] which is below the current versions minimum index compatibility version [" - + IndexVersion.MINIMUM_COMPATIBLE + + IndexVersions.MINIMUM_COMPATIBLE + "]" ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index da9610a77a563..095f70a3e5966 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; @@ -182,7 +183,15 @@ public void registerRepository(final PutRepositoryRequest request, final ActionL verifyStep.addListener( listener.delegateFailureAndWrap( (l, ignored) -> threadPool.generic() - .execute(ActionRunnable.wrap(getRepositoryDataStep, ll -> repository(request.name()).getRepositoryData(ll))) + .execute( + ActionRunnable.wrap( + getRepositoryDataStep, + ll -> repository(request.name()).getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading, do we need to fork, see #101445? + ll + ) + ) + ) ) ); @@ -630,7 +639,10 @@ public void getRepositoryData(final String repositoryName, final ActionListener< try { Repository repository = repository(repositoryName); assert repository != null; // should only be called once we've validated the repository exists - repository.getRepositoryData(listener); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? 
+ listener + ); } catch (Exception e) { listener.onFailure(e); } diff --git a/server/src/main/java/org/elasticsearch/repositories/Repository.java b/server/src/main/java/org/elasticsearch/repositories/Repository.java index 8f07997f3f392..1fd01631818bc 100644 --- a/server/src/main/java/org/elasticsearch/repositories/Repository.java +++ b/server/src/main/java/org/elasticsearch/repositories/Repository.java @@ -30,6 +30,7 @@ import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.concurrent.Executor; import java.util.function.Function; /** @@ -117,13 +118,17 @@ public void onFailure(Exception e) { IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) throws IOException; /** - * Returns a {@link RepositoryData} to describe the data in the repository, including the snapshots - * and the indices across all snapshots found in the repository. Throws a {@link RepositoryException} - * if there was an error in reading the data. - * @param listener listener that may be resolved on different kinds of threads including transport and cluster state applier threads - * and therefore must fork to a new thread for executing any long running actions + * Returns a {@link RepositoryData} to describe the data in the repository, including the snapshots and the indices across all snapshots + * found in the repository. Completes the listener with a {@link RepositoryException} if there was an error in reading the data. + * + * @param responseExecutor Executor to use to complete the listener if not using the calling thread. Using {@link + * org.elasticsearch.common.util.concurrent.EsExecutors#DIRECT_EXECUTOR_SERVICE} means to complete the listener + * on the thread which ultimately resolved the {@link RepositoryData}, which might be a low-latency transport or + * cluster applier thread so make sure not to do anything slow or expensive in that case. + * @param listener Listener which is either completed on the calling thread (if the {@link RepositoryData} is immediately + * available, e.g. from an in-memory cache), otherwise it is completed using {@code responseExecutor}. */ - void getRepositoryData(ActionListener listener); + void getRepositoryData(Executor responseExecutor, ActionListener listener); /** * Finalizes snapshotting process diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java index 02a92bd3d848d..1c5ea5a2b0012 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoryData.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.snapshots.SnapshotId; @@ -713,7 +714,7 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final // Likewise if we simply encode the numeric IndexVersion as a string then versions from 8.11.0 onwards will report the exact // string in this message, which is not especially helpful to users. Slightly more helpful than the opaque parse error reported // by earlier versions, but still not great. TODO rethink this if and when adding a new snapshot repository format version. 
- if (minVersion.before(IndexVersion.V_8_10_0)) { + if (minVersion.before(IndexVersions.V_8_10_0)) { // write as a string builder.field(MIN_VERSION, Version.fromId(minVersion.id()).toString()); } else { @@ -783,7 +784,7 @@ public XContentBuilder snapshotsToXContent(final XContentBuilder builder, final numericIndexVersionMarkerPlaceholdersUsed += 1; lastSnapshotWithNumericIndexVersionPlaceholder = snapshot; builder.field(VERSION, NUMERIC_INDEX_VERSION_MARKER_STRING); - } else if (version.onOrAfter(IndexVersion.FIRST_DETACHED_INDEX_VERSION)) { + } else if (version.onOrAfter(IndexVersions.FIRST_DETACHED_INDEX_VERSION)) { builder.field(VERSION, NUMERIC_INDEX_VERSION_MARKER_STRING); builder.field(INDEX_VERSION, version.id()); } else { @@ -883,9 +884,9 @@ public static RepositoryData snapshotsFromXContent(XContentParser parser, long g XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser); final var versionString = parser.text(); final var version = switch (versionString) { - case "7.12.0" -> IndexVersion.V_7_12_0; - case "7.9.0" -> IndexVersion.V_7_9_0; - case "7.6.0" -> IndexVersion.V_7_6_0; + case "7.12.0" -> IndexVersions.V_7_12_0; + case "7.9.0" -> IndexVersions.V_7_9_0; + case "7.6.0" -> IndexVersions.V_7_6_0; default -> // All (known) versions only ever emit one of the above strings for the format version, so if we see something // else it must be a newer version or else something wholly invalid. Report the raw string rather than trying diff --git a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java index 4b08c0f19ea8a..b9da0b1663c59 100644 --- a/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/UnknownTypeRepository.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.concurrent.Executor; /** * This class represents a repository that could not be initialized due to unknown type. 
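Each implementation above now threads a responseExecutor through getRepositoryData. The javadoc contract in Repository.java is: if the RepositoryData is immediately available (for example from an in-memory cache) the listener completes on the calling thread, otherwise it completes via the supplied executor. A self-contained analogue of that contract, not the Elasticsearch implementation:

    import java.util.concurrent.Executor;
    import java.util.concurrent.atomic.AtomicReference;
    import java.util.function.Consumer;

    class CachedAsyncSource {
        private final AtomicReference<String> cached = new AtomicReference<>();

        // Cache hit: complete on the calling thread, which may be latency-sensitive,
        // so the handler must stay cheap. Cache miss: fork the slow load and complete
        // the listener via responseExecutor.
        void get(Executor responseExecutor, Consumer<String> listener) {
            String hit = cached.get();
            if (hit != null) {
                listener.accept(hit);
                return;
            }
            new Thread(() -> {
                String loaded = "repository-data"; // stand-in for the expensive read
                cached.set(loaded);
                responseExecutor.execute(() -> listener.accept(loaded));
            }).start();
        }
    }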
@@ -66,7 +67,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } @Override - public void getRepositoryData(ActionListener listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { listener.onFailure(createUnknownTypeException()); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 79e2ed3c5c206..1af4a1b8f1337 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1576,7 +1576,14 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new SubscribableListener // Get the current RepositoryData - .newForked(this::getRepositoryData) + .newForked( + listener -> getRepositoryData( + // TODO we might already be on a SNAPSHOT thread, make it so that we're always on a SNAPSHOT thread here and then we + // can avoid a little more forking below + EsExecutors.DIRECT_EXECUTOR_SERVICE, + listener + ) + ) // Identify and write the missing metadata .andThen((l, existingRepositoryData) -> { @@ -1673,7 +1680,17 @@ record RootBlobUpdateResult(RepositoryData oldRepositoryData, RepositoryData new ), repositoryStateId, repositoryMetaVersion, - finalizeSnapshotContext::updatedClusterState, + new Function<>() { + @Override + public ClusterState apply(ClusterState state) { + return finalizeSnapshotContext.updatedClusterState(state); + } + + @Override + public String toString() { + return "finalizing snapshot [" + metadata.name() + "][" + snapshotId + "]"; + } + }, l.map(newRepositoryData -> new RootBlobUpdateResult(existingRepositoryData, newRepositoryData)) ); // NB failure of writeIndexGen doesn't guarantee the update failed, so we cannot safely clean anything up on failure @@ -2007,7 +2024,7 @@ public void endVerification(String seed) { private final AtomicReference latestKnownRepositoryData = new AtomicReference<>(RepositoryData.EMPTY); @Override - public void getRepositoryData(ActionListener listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { // RepositoryData is the responsibility of the elected master: we shouldn't be loading it on other nodes as we don't have good // consistency guarantees there, but electedness is too ephemeral to assert. 
We can say for sure that this node should be // master-eligible, which is almost as strong since all other snapshot-related activity happens on data nodes whether they be @@ -2037,7 +2054,7 @@ public void getRepositoryData(ActionListener listener) { logger.debug(""" [{}] loading repository metadata for the first time, trying to determine correct generation and to store it in the \ cluster state""", metadata.name()); - if (initializeRepoGenerationTracking(listener)) { + if (initializeRepoGenerationTracking(responseExecutor, listener)) { return; } // else there was a concurrent modification, retry from the start } else { @@ -2046,7 +2063,7 @@ public void getRepositoryData(ActionListener listener) { metadata.name(), latestKnownRepoGen ); - repoDataLoadDeduplicator.execute(listener); + repoDataLoadDeduplicator.execute(new ThreadedActionListener<>(responseExecutor, listener)); return; } } @@ -2066,13 +2083,14 @@ private RepositoryException notStartedException() { * This ensures that operations using {@code SnapshotsService#executeConsistentStateUpdate} right after mounting a fresh repository will * have a consistent view of the {@link RepositoryData} before any data has been written to the repository. * - * @param listener listener to resolve with new repository data + * @param responseExecutor executor to use to complete the listener if not completing it on the calling thread + * @param listener listener to resolve with new repository data * @return {@code true} if this method at least started the initialization process successfully and will eventually complete the * listener, {@code false} if there was some concurrent state change which prevents us from starting repo generation tracking (typically * that some other node got there first) and the caller should check again and possibly retry or complete the listener in some other * way. 
*/ - private boolean initializeRepoGenerationTracking(ActionListener listener) { + private boolean initializeRepoGenerationTracking(Executor responseExecutor, ActionListener listener) { final SubscribableListener listenerToSubscribe; final ActionListener listenerToComplete; @@ -2160,7 +2178,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) .addListener(listenerToComplete); } - listenerToSubscribe.addListener(listener, EsExecutors.DIRECT_EXECUTOR_SERVICE, threadPool.getThreadContext()); + listenerToSubscribe.addListener(listener, responseExecutor, threadPool.getThreadContext()); return true; } @@ -2570,6 +2588,11 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) logger.trace("[{}] successfully set pending repository generation to [{}]", metadata.name(), newGen); setPendingStep.onResponse(newGen); } + + @Override + public String toString() { + return Strings.format("start RepositoryData update from generation [%d], stateFilter=[%s]", expectedGen, stateFilter); + } }); final ListenableFuture filterRepositoryDataStep = new ListenableFuture<>(); @@ -2630,7 +2653,7 @@ public void onFailure(Exception e) { if (ensureSafeGenerationExists(expectedGen, delegate::onFailure) == false) { return; } - final String indexBlob = INDEX_FILE_PREFIX + Long.toString(newGen); + final String indexBlob = INDEX_FILE_PREFIX + newGen; logger.debug("Repository [{}] writing new index generational blob [{}]", metadata.name(), indexBlob); writeAtomic(blobContainer(), indexBlob, out -> { try (XContentBuilder xContentBuilder = XContentFactory.jsonBuilder(org.elasticsearch.core.Streams.noCloseStream(out))) { @@ -2689,6 +2712,16 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) cacheRepositoryData(newRepositoryData, version); delegate.onResponse(newRepositoryData); } + + @Override + public String toString() { + return Strings.format( + "complete RepositoryData update from generation [%d] to generation [%d], stateFilter=[%s]", + expectedGen, + newGen, + stateFilter + ); + } }); })); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/MeteredBlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/MeteredBlobStoreRepository.java index c69270011fc7b..4029938c6fc5f 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/MeteredBlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/MeteredBlobStoreRepository.java @@ -16,7 +16,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.repositories.RepositoryInfo; import org.elasticsearch.repositories.RepositoryStatsSnapshot; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -24,7 +24,7 @@ public abstract class MeteredBlobStoreRepository extends BlobStoreRepository { private final RepositoryInfo repositoryInfo; - protected final Meter meter; + protected final MeterRegistry meterRegistry; public MeteredBlobStoreRepository( RepositoryMetadata metadata, @@ -34,10 +34,10 @@ public MeteredBlobStoreRepository( RecoverySettings recoverySettings, BlobPath basePath, Map location, - Meter meter + MeterRegistry meterRegistry ) { super(metadata, namedXContentRegistry, clusterService, bigArrays, recoverySettings, basePath); - this.meter = meter; + this.meterRegistry = meterRegistry; 
ThreadPool threadPool = clusterService.getClusterApplierService().threadPool(); this.repositoryInfo = new RepositoryInfo( UUIDs.randomBase64UUID(), diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java index 3adf32454cc20..f6d5ab3ead6af 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/ReservedClusterStateService.java @@ -239,7 +239,7 @@ public void onFailure(Exception e) { @Override public void onFailure(Exception e) { // If we encounter an error while running the non-state transforms, we avoid saving any cluster state. - errorListener.accept(checkAndReportError(namespace, List.of(e.getMessage()), reservedStateVersion)); + errorListener.accept(checkAndReportError(namespace, List.of(stackTrace(e)), reservedStateVersion)); } }); } diff --git a/server/src/main/java/org/elasticsearch/script/field/IPAddress.java b/server/src/main/java/org/elasticsearch/script/field/IPAddress.java index 53f0f118a09b0..ed49928d6afa6 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IPAddress.java +++ b/server/src/main/java/org/elasticsearch/script/field/IPAddress.java @@ -8,6 +8,11 @@ package org.elasticsearch.script.field; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.GenericNamedWriteable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -20,7 +25,8 @@ /** * IP address for use in scripting.
*/ -public class IPAddress implements ToXContentObject { +public class IPAddress implements ToXContentObject, GenericNamedWriteable { + static final String NAMED_WRITEABLE_NAME = "IPAddress"; protected final InetAddress address; IPAddress(InetAddress address) { @@ -31,6 +37,14 @@ public IPAddress(String address) { this.address = InetAddresses.forString(address); } + public IPAddress(StreamInput input) throws IOException { + this(input.readString()); + } + + public void writeTo(StreamOutput output) throws IOException { + output.writeString(toString()); + } + public boolean isV4() { return address instanceof Inet4Address; } @@ -49,4 +63,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder.value(this.toString()); } + @Override + public String getWriteableName() { + return NAMED_WRITEABLE_NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.IP_ADDRESS_WRITEABLE; + } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index dafcf45454aaf..6919cfdbc00b4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -224,7 +224,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public static final Setting QUERY_PHASE_PARALLEL_COLLECTION_ENABLED = Setting.boolSetting( "search.query_phase_parallel_collection_enabled", - false, + true, Property.NodeScope, Property.Dynamic ); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java index fd4fbac82334e..35b8230a48554 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/CoreValuesSourceType.java @@ -40,6 +40,7 @@ import java.util.List; import java.util.Locale; import java.util.function.Function; +import java.util.function.LongSupplier; /** * {@link CoreValuesSourceType} holds the {@link ValuesSourceType} implementations for the core aggregations package. 
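As a sanity check on the IPAddress change above: the new writeTo(...) serializes the address as its string form and the new StreamInput constructor parses it back. A test-style round-trip sketch (not part of the patch; assumes a BytesStreamOutput) makes the wire format concrete:

// Round-trip through the string-based wire format introduced above.
BytesStreamOutput out = new BytesStreamOutput();
new IPAddress("192.168.0.1").writeTo(out);                 // writes "192.168.0.1"
IPAddress copy = new IPAddress(out.bytes().streamInput()); // re-parses the string
assert copy.isV4();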
@@ -79,13 +80,13 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { Number missing; if (rawMissing instanceof Number) { missing = (Number) rawMissing; } else { - missing = docValueFormat.parseDouble(rawMissing.toString(), false, context::nowInMillis); + missing = docValueFormat.parseDouble(rawMissing.toString(), false, nowInMillis); } return MissingValues.replaceMissing((ValuesSource.Numeric) valuesSource, missing); } @@ -138,7 +139,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { final BytesRef missing = docValueFormat.parseBytesRef(rawMissing.toString()); if (valuesSource instanceof ValuesSource.Bytes.WithOrdinals) { @@ -174,7 +175,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { // TODO: also support the structured formats of geo points final GeoPoint missing = new GeoPoint(rawMissing.toString()); @@ -213,7 +214,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { throw new IllegalArgumentException("Can't apply missing values on a " + valuesSource.getClass()); } @@ -239,9 +240,9 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { - return KEYWORD.replaceMissing(valuesSource, rawMissing, docValueFormat, context); + return KEYWORD.replaceMissing(valuesSource, rawMissing, docValueFormat, nowInMillis); } @Override @@ -385,9 +386,9 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { - return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, context); + return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, nowInMillis); } @Override @@ -422,9 +423,9 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { - return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, context); + return NUMERIC.replaceMissing(valuesSource, rawMissing, docValueFormat, nowInMillis); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesValuesSourceType.java index ae720528b680f..e1e249466aea6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesValuesSourceType.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.aggregations.AggregationErrors; import java.util.Locale; +import java.util.function.LongSupplier; import static org.elasticsearch.search.aggregations.support.CoreValuesSourceType.GEOPOINT; @@ -51,7 +52,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { throw new 
IllegalArgumentException("Cannot replace missing values for time-series counters"); } @@ -77,9 +78,9 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { - return GEOPOINT.replaceMissing(valuesSource, rawMissing, docValueFormat, context); + return GEOPOINT.replaceMissing(valuesSource, rawMissing, docValueFormat, nowInMillis); } }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java index 028e2b922ee05..85788c1964b40 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfig.java @@ -23,6 +23,7 @@ import java.time.ZoneId; import java.util.function.DoubleUnaryOperator; import java.util.function.Function; +import java.util.function.LongSupplier; /** * A configuration that tells aggregations how to retrieve data from the index @@ -183,7 +184,7 @@ private static ValuesSourceConfig internalResolve( missing, timeZone, docValueFormat, - context + context::nowInMillis ); return config; } @@ -257,14 +258,14 @@ private static DocValueFormat resolveFormat( public static ValuesSourceConfig resolveFieldOnly(MappedFieldType fieldType, AggregationContext context) { FieldContext fieldContext = context.buildFieldContext(fieldType); ValuesSourceType vstype = fieldContext.indexFieldData().getValuesSourceType(); - return new ValuesSourceConfig(vstype, fieldContext, false, null, null, null, null, null, context); + return new ValuesSourceConfig(vstype, fieldContext, false, null, null, null, null, null, context::nowInMillis); } /** * Convenience method for creating unmapped configs */ public static ValuesSourceConfig resolveUnmapped(ValuesSourceType valuesSourceType, AggregationContext context) { - return new ValuesSourceConfig(valuesSourceType, null, true, null, null, null, null, null, context); + return new ValuesSourceConfig(valuesSourceType, null, true, null, null, null, null, null, context::nowInMillis); } private final ValuesSourceType valuesSourceType; @@ -277,10 +278,6 @@ public static ValuesSourceConfig resolveUnmapped(ValuesSourceType valuesSourceTy private final ZoneId timeZone; private final ValuesSource valuesSource; - private ValuesSourceConfig() { - throw new UnsupportedOperationException(); - } - @SuppressWarnings("this-escape") public ValuesSourceConfig( ValuesSourceType valuesSourceType, @@ -291,7 +288,7 @@ public ValuesSourceConfig( Object missing, ZoneId timeZone, DocValueFormat format, - AggregationContext context + LongSupplier nowInMillis ) { if (unmapped && fieldContext != null) { throw new IllegalStateException("value source config is invalid; marked as unmapped but specified a mapped field"); @@ -311,10 +308,10 @@ public ValuesSourceConfig( "value source config is invalid; must have either a field context or a script or marked as unwrapped" ); } - valuesSource = constructValuesSource(missing, format, context); + valuesSource = constructValuesSource(missing, format, nowInMillis); } - private ValuesSource constructValuesSource(Object missing, DocValueFormat format, AggregationContext context) { + private ValuesSource constructValuesSource(Object missing, DocValueFormat format, LongSupplier nowInMillis) { final ValuesSource vs; if (this.unmapped) { vs = valueSourceType().getEmpty(); @@ -329,7 
+326,7 @@ private ValuesSource constructValuesSource(Object missing, DocValueFormat format } if (missing() != null) { - return valueSourceType().replaceMissing(vs, missing, format, context); + return valueSourceType().replaceMissing(vs, missing, format, nowInMillis); } else { return vs; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java index 684fecb88e740..00482ea380e1f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceType.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import java.time.ZoneId; +import java.util.function.LongSupplier; /** * {@link ValuesSourceType} represents a collection of fields that share a common set of operations, for example all numeric fields. @@ -64,13 +65,13 @@ public interface ValuesSourceType { * Apply the given missing value to an already-constructed {@link ValuesSource}. Types which do not support missing values should throw * {@link org.elasticsearch.search.aggregations.AggregationExecutionException} * - * @param valuesSource - The original {@link ValuesSource} - * @param rawMissing - The missing value we got from the parser, typically a string or number + * @param valuesSource - The original {@link ValuesSource} + * @param rawMissing - The missing value we got from the parser, typically a string or number * @param docValueFormat - The format to use for further parsing the user supplied value, e.g. a date format - * @param context - Context for this aggregation used to handle {@link AggregationContext#nowInMillis() "now"} + * @param nowInMillis - supplier of current time and date in milliseconds * @return - Wrapper over the provided {@link ValuesSource} to apply the given missing value */ - ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, AggregationContext context); + ValuesSource replaceMissing(ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, LongSupplier nowInMillis); /** * This method provides a hook for specifying a type-specific formatter. When {@link ValuesSourceConfig} can resolve a diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 043968e254d1d..2d9d6d1d8d75d 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -117,6 +117,8 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField POINT_IN_TIME = new ParseField("pit"); public static final ParseField RUNTIME_MAPPINGS_FIELD = new ParseField("runtime_mappings"); + private static final boolean RANK_SUPPORTED = Booleans.parseBoolean(System.getProperty("es.search.rank_supported"), true); + /** * A static factory method to construct a new search source. 
*/ @@ -1351,6 +1353,9 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr knnSearch = List.of(KnnSearchBuilder.fromXContent(parser)); searchUsage.trackSectionUsage(KNN_FIELD.getPreferredName()); } else if (RANK_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + if (RANK_SUPPORTED == false) { + throwUnknownKey(parser, token, currentFieldName); + } if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { throw new ParsingException( parser.getTokenLocation(), @@ -1555,6 +1560,9 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr } searchUsage.trackSectionUsage(KNN_FIELD.getPreferredName()); } else if (SUB_SEARCHES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + if (RANK_SUPPORTED == false) { + throwUnknownKey(parser, token, currentFieldName); + } if (subSearchSourceBuilders.isEmpty() == false) { throw new IllegalArgumentException( "cannot specify field [" + currentFieldName + "] and field [" + QUERY_FIELD.getPreferredName() + "]" @@ -1572,18 +1580,10 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr } searchUsage.trackSectionUsage(SUB_SEARCHES_FIELD.getPreferredName()); } else { - throw new ParsingException( - parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation() - ); + throwUnknownKey(parser, token, currentFieldName); } } else { - throw new ParsingException( - parser.getTokenLocation(), - "Unknown key for a " + token + " in [" + currentFieldName + "].", - parser.getTokenLocation() - ); + throwUnknownKey(parser, token, currentFieldName); } } if (checkTrailingTokens) { @@ -1596,6 +1596,15 @@ private SearchSourceBuilder parseXContent(XContentParser parser, boolean checkTr return this; } + private static void throwUnknownKey(XContentParser parser, XContentParser.Token token, String currentFieldName) + throws ParsingException { + throw new ParsingException( + parser.getTokenLocation(), + "Unknown key for a " + token + " in [" + currentFieldName + "].", + parser.getTokenLocation() + ); + } + public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { if (from != -1) { builder.field(FROM_FIELD.getPreferredName(), from); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index eec1d70d17423..3c69db98c7588 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -87,7 +87,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; - private final LeafSlice[] leafSlices; + private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -150,13 +150,15 @@ public ContextIndexSearcher( setQueryCachingPolicy(queryCachingPolicy); this.cancellable = cancellable; this.minimumDocsPerSlice = minimumDocsPerSlice; - if (executor == null) { - this.leafSlices = null; - } else { - // we offload to the executor unconditionally, including requests that don't support concurrency - this.leafSlices = computeSlices(getLeafContexts(), maximumNumberOfSlices, minimumDocsPerSlice); - assert this.leafSlices.length <= maximumNumberOfSlices : "more slices 
created than the maximum allowed"; - } + this.maximumNumberOfSlices = maximumNumberOfSlices; + } + + @Override + protected LeafSlice[] slices(List leaves) { + // we offload to the executor unconditionally, including requests that don't support concurrency + LeafSlice[] leafSlices = computeSlices(getLeafContexts(), maximumNumberOfSlices, minimumDocsPerSlice); + assert leafSlices.length <= maximumNumberOfSlices : "more slices created than the maximum allowed"; + return leafSlices; } // package private for testing @@ -238,15 +240,6 @@ public Weight createWeight(Query query, ScoreMode scoreMode, float boost) throws } } - /** - * Returns the slices created by this {@link ContextIndexSearcher}, different from those created by the base class and - * returned by {@link IndexSearcher#getSlices()}. The former are used for parallelizing the collection, while the latter are used - * for now to parallelize rewrite (e.g. knn query rewrite) - */ - final LeafSlice[] getSlicesForCollection() { - return leafSlices; - } - /** * Each computed slice contains at least 10% of the total data in the leaves with a * minimum given by the minDocsPerSlice parameter and the final number @@ -346,7 +339,9 @@ private T search(Weight weight, CollectorManager if (getExecutor() == null) { search(leafContexts, weight, firstCollector); return collectorManager.reduce(Collections.singletonList(firstCollector)); - } else if (leafSlices.length == 0) { + } + LeafSlice[] leafSlices = getSlices(); + if (leafSlices.length == 0) { assert leafContexts.isEmpty(); doAggregationPostCollection(firstCollector); return collectorManager.reduce(Collections.singletonList(firstCollector)); @@ -499,12 +494,7 @@ public void throwTimeExceededException() { } private static class TimeExceededException extends RuntimeException { - - @Override - public Throwable fillInStackTrace() { - // never re-thrown so we can save the expensive stacktrace - return this; - } + // This exception should never be re-thrown, but we fill in the stacktrace to be able to trace where it does not get properly caught } /** diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index e4dffea062721..933d2198a2dae 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.LuceneDocument; @@ -271,7 +272,7 @@ public void validateReferences(IndexVersion indexVersionCreated, Function repositoryDataListener = new ListenableFuture<>(); - repository.getRepositoryData(repositoryDataListener); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? 
+ repositoryDataListener + ); repositoryDataListener.addListener( listener.delegateFailureAndWrap( @@ -516,17 +520,20 @@ static void refreshRepositoryUuids(boolean enabled, RepositoriesService reposito if (repository instanceof BlobStoreRepository && repository.getMetadata().uuid().equals(RepositoryData.MISSING_UUID)) { final var repositoryName = repository.getMetadata().name(); logger.info("refreshing repository UUID for repository [{}]", repositoryName); - repository.getRepositoryData(ActionListener.releaseAfter(new ActionListener<>() { - @Override - public void onResponse(RepositoryData repositoryData) { - logger.debug(() -> format("repository UUID [{}] refresh completed", repositoryName)); - } + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? + ActionListener.releaseAfter(new ActionListener<>() { + @Override + public void onResponse(RepositoryData repositoryData) { + logger.debug(() -> format("repository UUID [{}] refresh completed", repositoryName)); + } - @Override - public void onFailure(Exception e) { - logger.debug(() -> format("repository UUID [{}] refresh failed", repositoryName), e); - } - }, refs.acquire())); + @Override + public void onFailure(Exception e) { + logger.debug(() -> format("repository UUID [{}] refresh failed", repositoryName), e); + } + }, refs.acquire()) + ); } } } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SearchableSnapshotsSettings.java b/server/src/main/java/org/elasticsearch/snapshots/SearchableSnapshotsSettings.java index 6dd99fdd7145b..e1a8a90d1b6ea 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SearchableSnapshotsSettings.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SearchableSnapshotsSettings.java @@ -32,6 +32,7 @@ public final class SearchableSnapshotsSettings { public static final String SEARCHABLE_SNAPSHOTS_SNAPSHOT_NAME_SETTING_KEY = "index.store.snapshot.snapshot_name"; public static final String SEARCHABLE_SNAPSHOTS_SNAPSHOT_UUID_SETTING_KEY = "index.store.snapshot.snapshot_uuid"; public static final String SEARCHABLE_SNAPSHOTS_DELETE_SNAPSHOT_ON_INDEX_DELETION = "index.store.snapshot.delete_searchable_snapshot"; + public static final String SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY = "index.store.snapshot.index_name"; private SearchableSnapshotsSettings() {} diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 7f502484f6372..7159f1a1ee5dd 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -74,6 +74,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.SystemDataStreamDescriptor; import org.elasticsearch.indices.SystemIndices; @@ -130,16 +131,16 @@ */ public class SnapshotsService extends AbstractLifecycleComponent implements ClusterStateApplier { - public static final IndexVersion SHARD_GEN_IN_REPO_DATA_VERSION = IndexVersion.V_7_6_0; + public static final IndexVersion SHARD_GEN_IN_REPO_DATA_VERSION = IndexVersions.V_7_6_0; - public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = IndexVersion.V_7_9_0; + public static final IndexVersion INDEX_GEN_IN_REPO_DATA_VERSION = 
IndexVersions.V_7_9_0; - public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersion.V_7_12_0; + public static final IndexVersion UUIDS_IN_REPO_DATA_VERSION = IndexVersions.V_7_12_0; public static final TransportVersion UUIDS_IN_REPO_DATA_TRANSPORT_VERSION = TransportVersions.V_7_12_0; - public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersion.V_7_16_0; + public static final IndexVersion FILE_INFO_WRITER_UUIDS_IN_SHARD_DATA_VERSION = IndexVersions.V_7_16_0; - public static final IndexVersion OLD_SNAPSHOT_FORMAT = IndexVersion.V_7_5_0; + public static final IndexVersion OLD_SNAPSHOT_FORMAT = IndexVersions.V_7_5_0; public static final String POLICY_ID_METADATA_FIELD = "policy"; @@ -277,6 +278,7 @@ private void submitCreateSnapshotRequest( RepositoryMetadata initialRepositoryMetadata ) { repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // Listener is lightweight, only submits a cluster state update task, no need to fork listener.delegateFailure( (l, repositoryData) -> masterServiceTaskQueue.submitTask( "create_snapshot [" + snapshot.getSnapshotId().getName() + ']', @@ -461,14 +463,17 @@ private void startCloning(Repository repository, SnapshotsInProgress.Entry clone } } // 2. step, load the number of shards we have in each index to be cloned from the index metadata. - repository.getRepositoryData(ActionListener.wrap(repositoryData -> { - for (IndexId index : indices) { - executor.execute(ActionRunnable.supply(shardCountListener, () -> { - final IndexMetadata metadata = repository.getSnapshotIndexMetaData(repositoryData, sourceSnapshot, index); - return Tuple.tuple(index, metadata.getNumberOfShards()); - })); - } - }, onFailure)); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? + ActionListener.wrap(repositoryData -> { + for (IndexId index : indices) { + executor.execute(ActionRunnable.supply(shardCountListener, () -> { + final IndexMetadata metadata = repository.getSnapshotIndexMetaData(repositoryData, sourceSnapshot, index); + return Tuple.tuple(index, metadata.getNumberOfShards()); + })); + } + }, onFailure) + ); }, onFailure)); // 3. step, we have all the shard counts, now update the cluster state to have clone jobs in the snap entry @@ -561,6 +566,11 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) logger.warn("Did not find expected entry [{}] in the cluster state", cloneEntry); } } + + @Override + public String toString() { + return Strings.format("start snapshot clone [%s] from [%s]", updatedEntry.snapshot(), updatedEntry.source()); + } }, "start snapshot clone", onFailure), onFailure) ); } @@ -1262,19 +1272,31 @@ private void endSnapshot(SnapshotsInProgress.Entry entry, Metadata metadata, @Nu final String repoName = snapshot.getRepository(); if (tryEnterRepoLoop(repoName)) { if (repositoryData == null) { - repositoriesService.repository(repoName).getRepositoryData(new ActionListener<>() { - @Override - public void onResponse(RepositoryData repositoryData) { - finalizeSnapshotEntry(snapshot, metadata, repositoryData); - } + repositoriesService.repository(repoName) + .getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO contemplate threading here, do we need to fork, see #101445? 
+ new ActionListener<>() { + @Override + public void onResponse(RepositoryData repositoryData) { + if (newFinalization) { + finalizeSnapshotEntry(snapshot, metadata, repositoryData); + } else { + runNextQueuedOperation(repositoryData, repoName, false); + } + } - @Override - public void onFailure(Exception e) { - submitUnbatchedTask("fail repo tasks for [" + repoName + "]", new FailPendingRepoTasksTask(repoName, e)); - } - }); + @Override + public void onFailure(Exception e) { + submitUnbatchedTask("fail repo tasks for [" + repoName + "]", new FailPendingRepoTasksTask(repoName, e)); + } + } + ); } else { - finalizeSnapshotEntry(snapshot, metadata, repositoryData); + if (newFinalization) { + finalizeSnapshotEntry(snapshot, metadata, repositoryData); + } else { + runNextQueuedOperation(repositoryData, repoName, false); + } } } else { if (newFinalization) { @@ -1306,6 +1328,7 @@ private void leaveRepoLoop(String repository) { private void finalizeSnapshotEntry(Snapshot snapshot, Metadata metadata, RepositoryData repositoryData) { assert currentlyFinalizing.contains(snapshot.getRepository()); + assert repositoryOperations.assertNotQueued(snapshot); try { SnapshotsInProgress.Entry entry = SnapshotsInProgress.get(clusterService.state()).snapshot(snapshot); final String failure = entry.failure(); @@ -1444,7 +1467,8 @@ public void onFailure(Exception e) { ); }, e -> handleFinalizationFailure(e, snapshot, repositoryData))); } catch (Exception e) { - assert false : new AssertionError(e); + logger.error(Strings.format("unexpected failure finalizing %s", snapshot), e); + assert false : new AssertionError("unexpected failure finalizing " + snapshot, e); handleFinalizationFailure(e, snapshot, repositoryData); } } @@ -2095,6 +2119,11 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) } } } + + @Override + public String toString() { + return Strings.format("delete snapshot task [%s]%s", repository, Arrays.toString(snapshotNames)); + } }, "delete snapshot [" + repository + "]" + Arrays.toString(snapshotNames), listener::onFailure); } @@ -2250,48 +2279,52 @@ private void executeConsistentStateUpdate( Consumer onFailure ) { final RepositoryMetadata repositoryMetadataStart = repository.getMetadata(); - repository.getRepositoryData(ActionListener.wrap(repositoryData -> { - final ClusterStateUpdateTask updateTask = createUpdateTask.apply(repositoryData); - submitUnbatchedTask(source, new ClusterStateUpdateTask(updateTask.priority(), updateTask.timeout()) { + repository.getRepositoryData( + // Listener is lightweight, only submits a cluster state update task, no need to fork + EsExecutors.DIRECT_EXECUTOR_SERVICE, + ActionListener.wrap(repositoryData -> { + final ClusterStateUpdateTask updateTask = createUpdateTask.apply(repositoryData); + submitUnbatchedTask(source, new ClusterStateUpdateTask(updateTask.priority(), updateTask.timeout()) { - private boolean executedTask = false; + private boolean executedTask = false; - @Override - public ClusterState execute(ClusterState currentState) throws Exception { - // Comparing the full metadata here on purpose instead of simply comparing the safe generation. - // If the safe generation has changed, then we have to reload repository data and start over. - // If the pending generation has changed we are in the midst of a write operation and might pick up the - // updated repository data and state on the retry. 
We don't want to wait for the write to finish though - // because it could fail for any number of reasons so we just retry instead of waiting on the cluster state - // to change in any form. - if (repositoryMetadataStart.equals( - RepositoriesMetadata.get(currentState).repository(repository.getMetadata().name()) - )) { - executedTask = true; - return updateTask.execute(currentState); + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + // Comparing the full metadata here on purpose instead of simply comparing the safe generation. + // If the safe generation has changed, then we have to reload repository data and start over. + // If the pending generation has changed we are in the midst of a write operation and might pick up the + // updated repository data and state on the retry. We don't want to wait for the write to finish though + // because it could fail for any number of reasons so we just retry instead of waiting on the cluster state + // to change in any form. + if (repositoryMetadataStart.equals( + RepositoriesMetadata.get(currentState).repository(repository.getMetadata().name()) + )) { + executedTask = true; + return updateTask.execute(currentState); + } + return currentState; } - return currentState; - } - @Override - public void onFailure(Exception e) { - if (executedTask) { - updateTask.onFailure(e); - } else { - onFailure.accept(e); + @Override + public void onFailure(Exception e) { + if (executedTask) { + updateTask.onFailure(e); + } else { + onFailure.accept(e); + } } - } - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - if (executedTask) { - updateTask.clusterStateProcessed(oldState, newState); - } else { - executeConsistentStateUpdate(repository, createUpdateTask, source, onFailure); + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + if (executedTask) { + updateTask.clusterStateProcessed(oldState, newState); + } else { + executeConsistentStateUpdate(repository, createUpdateTask, source, onFailure); + } } - } - }); - }, onFailure)); + }); + }, onFailure) + ); } /** Deletes snapshot from repository @@ -2690,6 +2723,11 @@ private static void markShardReassigned(RepositoryShardId shardId, Set()) + if (snapshotsToFinalize.getOrDefault(snapshot.getRepository(), new LinkedList<>()) .stream() - .noneMatch(entry -> entry.equals(snapshot)) : "Snapshot [" + snapshot + "] is still in finalization queue"; + .anyMatch(entry -> entry.equals(snapshot))) { + + final var assertionError = new AssertionError("[" + snapshot + "] should not be in " + snapshotsToFinalize); + logger.error("assertNotQueued failure", assertionError); + throw assertionError; + } return true; } diff --git a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java index add994787227f..d9c37afa268ba 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java +++ b/server/src/main/java/org/elasticsearch/telemetry/TelemetryProvider.java @@ -8,14 +8,14 @@ package org.elasticsearch.telemetry; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.telemetry.tracing.Tracer; public interface TelemetryProvider { Tracer getTracer(); - Meter getMeter(); + MeterRegistry getMeterRegistry(); TelemetryProvider NOOP = new TelemetryProvider() { @@ -25,8 +25,8 @@ public Tracer getTracer() { } @Override - public Meter 
getMeter() { - return Meter.NOOP; + public MeterRegistry getMeterRegistry() { + return MeterRegistry.NOOP; } }; } diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java index 797c125900bb8..47244d16924c0 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleGauge.java @@ -8,22 +8,10 @@ package org.elasticsearch.telemetry.metric; -import java.util.Map; - /** - * Record non-additive double values. eg number of running threads, current load + * Record non-additive double values based on a callback. eg number of running threads, current load */ -public interface DoubleGauge extends Instrument { - /** - * Record the current value for measured item - */ - void record(double value); - - /** - * Record the current value - * @param attributes key-value pairs to associate with the current measurement - */ - void record(double value, Map attributes); +public interface DoubleGauge extends Instrument, AutoCloseable { /** * Noop gauge for tests @@ -35,12 +23,7 @@ public String getName() { } @Override - public void record(double value) { - - } - - @Override - public void record(double value, Map attributes) { + public void close() { } }; diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleWithAttributes.java b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleWithAttributes.java new file mode 100644 index 0000000000000..e342b6128998d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/DoubleWithAttributes.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +public record DoubleWithAttributes(double value, Map attributes) { + +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java index 71539064ce53e..a9387acd67434 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongGauge.java @@ -8,24 +8,10 @@ package org.elasticsearch.telemetry.metric; -import java.util.Map; - /** - * Record non-additive long values. + * Record non-additive long values based on a callback */ -public interface LongGauge extends Instrument { - - /** - * Record the current value of the measured item. 
- * @param value - */ - void record(long value); - - /** - * Record the current value - * @param attributes key-value pairs to associate with the current measurement - */ - void record(long value, Map attributes); +public interface LongGauge extends Instrument, AutoCloseable { /** * Noop gauge for tests @@ -37,12 +23,7 @@ public String getName() { } @Override - public void record(long value) { - - } - - @Override - public void record(long value, Map attributes) { + public void close() throws Exception { } }; diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/LongWithAttributes.java b/server/src/main/java/org/elasticsearch/telemetry/metric/LongWithAttributes.java new file mode 100644 index 0000000000000..eef880431fb83 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/LongWithAttributes.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.metric; + +import java.util.Map; + +public record LongWithAttributes(long value, Map attributes) { + +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java b/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java similarity index 90% rename from server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java rename to server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java index 77bbf6f673fd3..6940795213603 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/metric/Meter.java +++ b/server/src/main/java/org/elasticsearch/telemetry/metric/MeterRegistry.java @@ -8,12 +8,14 @@ package org.elasticsearch.telemetry.metric; +import java.util.function.Supplier; + /** * Container for metering instruments. Meters with the same name and type (DoubleCounter, etc) can * only be registered once. * TODO(stu): describe name, unit and description */ -public interface Meter { +public interface MeterRegistry { /** * Register a {@link DoubleCounter}. The returned object may be reused. * @param name name of the counter @@ -51,9 +53,11 @@ public interface Meter { * @param name name of the gauge * @param description description of purpose * @param unit the unit (bytes, sec, hour) + * @param observer callback to use. This is called once during reporting period. + * Must not throw an exception and must be safe to call from different threads. * @return the registered meter. */ - DoubleGauge registerDoubleGauge(String name, String description, String unit); + DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer); /** * Retrieved a previously registered {@link DoubleGauge}. @@ -115,9 +119,11 @@ public interface Meter { * @param name name of the gauge * @param description description of purpose * @param unit the unit (bytes, sec, hour) + * @param observer callback to use. This is called once during reporting period. + * Must not throw an exception and must be safe to call from different threads. * @return the registered meter. */ - LongGauge registerLongGauge(String name, String description, String unit); + LongGauge registerLongGauge(String name, String description, String unit, Supplier observer); /** * Retrieved a previously registered {@link LongGauge}. 
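The javadoc above describes the new pull model for gauges: instead of pushing values with record(...), callers register an observer that the telemetry layer invokes once per reporting period. A registration sketch (hypothetical metric name and callback, assuming a MeterRegistry instance is in scope) might look like:

// The observer must not throw and must be safe to call from arbitrary threads.
LongGauge activeThreads = meterRegistry.registerLongGauge(
    "es.test.threads.active",                       // hypothetical metric name
    "number of live JVM threads",
    "count",
    () -> new LongWithAttributes(Thread.activeCount(), Map.of())
);
// LongGauge now extends AutoCloseable; closing it deregisters the callback.
// (Note close() is declared to throw Exception, so call sites need try/catch.)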
@@ -145,7 +151,7 @@ public interface Meter { /** * Noop implementation for tests */ - Meter NOOP = new Meter() { + MeterRegistry NOOP = new MeterRegistry() { @Override public DoubleCounter registerDoubleCounter(String name, String description, String unit) { return DoubleCounter.NOOP; @@ -166,7 +172,7 @@ public DoubleUpDownCounter getDoubleUpDownCounter(String name) { } @Override - public DoubleGauge registerDoubleGauge(String name, String description, String unit) { + public DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier observer) { return DoubleGauge.NOOP; } @@ -206,7 +212,7 @@ public LongUpDownCounter getLongUpDownCounter(String name) { } @Override - public LongGauge registerLongGauge(String name, String description, String unit) { + public LongGauge registerLongGauge(String name, String description, String unit, Supplier observer) { return LongGauge.NOOP; } diff --git a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java index e1f6e561a047f..320b9cfdbf7e6 100644 --- a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -300,7 +301,7 @@ public void onFailure(Exception e) { DiscoveryNodeRole.roles(), new VersionInformation( Version.CURRENT.minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ) ); diff --git a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java index 869ec2453f411..0dcad9cf6864c 100644 --- a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java @@ -32,6 +32,7 @@ import org.elasticsearch.core.Booleans; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; @@ -497,7 +498,7 @@ public String toString() { private static DiscoveryNode resolveSeedNode(String clusterAlias, String address, String proxyAddress) { var seedVersion = new VersionInformation( Version.CURRENT.minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); if (proxyAddress == null || proxyAddress.isEmpty()) { diff --git a/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification new file mode 100644 index 0000000000000..699b48cd3b20b --- /dev/null +++ b/server/src/main/resources/META-INF/services/org.elasticsearch.features.FeatureSpecification @@ -0,0 +1,9 @@ +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. 
Licensed under the Elastic License +# 2.0 and the Server Side Public License, v 1; you may not use this file except +# in compliance with, at your election, the Elastic License 2.0 or the Server +# Side Public License, v 1. +# + +org.elasticsearch.features.FeaturesSupportedSpecification diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java index fea15a77c48b1..2193e07f84f38 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/configuration/TransportClearVotingConfigExclusionsActionTests.java @@ -199,7 +199,7 @@ public void testCannotClearVotingConfigurationWhenItIsDisabled() { } private TransportResponseHandler expectSuccess(Consumer onResponse) { - return responseHandler(onResponse, e -> { throw new AssertionError("unexpected", e); }); + return responseHandler(onResponse, ESTestCase::fail); } private TransportResponseHandler expectError(Consumer onException) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusResponseTests.java index 1488b6ac519f7..90caa846eee0a 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/migration/GetFeatureUpgradeStatusResponseTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.AbstractWireSerializingTestCase; import java.util.Collections; @@ -89,7 +90,7 @@ public void testUpgradeStatusCominations() { private static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus createFeatureStatus() { return new GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus( randomAlphaOfLengthBetween(3, 20), - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), randomFrom(org.elasticsearch.action.admin.cluster.migration.GetFeatureUpgradeStatusResponse.UpgradeStatus.values()), randomList(4, GetFeatureUpgradeStatusResponseTests::getIndexInfo) ); @@ -98,7 +99,7 @@ private static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus createFeatur private static GetFeatureUpgradeStatusResponse.IndexInfo getIndexInfo() { return new GetFeatureUpgradeStatusResponse.IndexInfo( randomAlphaOfLengthBetween(3, 20), - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), null ); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java index 3a14bbe10a56c..3e0460565dd61 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/info/NodeInfoTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; 
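For context on the new META-INF/services entry above: classes listed in that file are discovered at startup via java.util.ServiceLoader. A hypothetical provider (class name and feature id invented for illustration; assumes FeatureSpecification exposes a getFeatures() hook returning the NodeFeatures this node supports) would look like:

// Sketch of a FeatureSpecification implementation; the services file maps the
// interface name to concrete classes like this one so ServiceLoader finds them.
public class MyFeatures implements FeatureSpecification {
    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(new NodeFeature("my_module.example_feature")); // invented id
    }
}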
import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.monitor.os.OsInfo; import org.elasticsearch.test.ESTestCase; @@ -45,7 +46,7 @@ public void testGetInfo() { Build.current(), DiscoveryNodeUtils.builder("test_node") .roles(emptySet()) - .version(VersionUtils.randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomCompatibleVersion(random())) + .version(VersionUtils.randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomCompatibleVersion(random())) .build(), null, null, diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/shutdown/TransportPrevalidateNodeRemovalActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/shutdown/TransportPrevalidateNodeRemovalActionTests.java index 1bd0ddfbdd645..d6e48104620cd 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/shutdown/TransportPrevalidateNodeRemovalActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/shutdown/TransportPrevalidateNodeRemovalActionTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -63,7 +63,7 @@ private DiscoveryNode randomNode(String nodeName, String nodeId) { return DiscoveryNodeUtils.builder(nodeId) .name(nodeName) .roles(emptySet()) - .version(randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomVersion()) + .version(randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomVersion()) .build(); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 3da303393861d..217cb8e3334e3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -34,7 +34,7 @@ import org.elasticsearch.http.HttpStats; import org.elasticsearch.http.HttpStatsTests; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.bulk.stats.BulkStats; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.request.RequestCacheStats; @@ -664,7 +664,7 @@ private static ShardStats createShardStats(ShardId shardId) { public static NodeStats createNodeStats() { DiscoveryNode node = DiscoveryNodeUtils.builder("test_node") .roles(emptySet()) - .version(VersionUtils.randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomVersion()) + .version(VersionUtils.randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomVersion()) .build(); NodeIndicesStats nodeIndicesStats = null; if (frequently()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java index dc2578b835de2..b593c947fa725 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/remote/RemoteClusterNodesActionTests.java @@ -46,6 +46,7 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -114,6 +115,7 @@ protected void Request request, ActionListener listener ) { + assertThat(threadContext.isSystemContext(), is(true)); assertSame(TransportNodesInfoAction.TYPE, action); assertThat( asInstanceOf(NodesInfoRequest.class, request).requestedMetrics(), @@ -128,7 +130,10 @@ public void close() {} ); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), RemoteClusterNodesAction.Request.REMOTE_CLUSTER_SERVER_NODES, future); + action.doExecute(mock(Task.class), RemoteClusterNodesAction.Request.REMOTE_CLUSTER_SERVER_NODES, ActionListener.wrap(response -> { + assertThat(threadContext.isSystemContext(), is(false)); + future.onResponse(response); + }, future::onFailure)); final List actualNodes = future.actionGet().getNodes(); assertThat(Set.copyOf(actualNodes), equalTo(expectedRemoteServerNodes)); @@ -191,6 +196,7 @@ protected void Request request, ActionListener listener ) { + assertThat(threadContext.isSystemContext(), is(true)); assertSame(TransportNodesInfoAction.TYPE, action); assertThat(asInstanceOf(NodesInfoRequest.class, request).requestedMetrics(), empty()); listener.onResponse((Response) nodesInfoResponse); @@ -202,7 +208,10 @@ public void close() {} ); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(mock(Task.class), RemoteClusterNodesAction.Request.ALL_NODES, future); + action.doExecute(mock(Task.class), RemoteClusterNodesAction.Request.ALL_NODES, ActionListener.wrap(response -> { + assertThat(threadContext.isSystemContext(), is(false)); + future.onResponse(response); + }, future::onFailure)); final List actualNodes = future.actionGet().getNodes(); assertThat(Set.copyOf(actualNodes), equalTo(expectedRemoteNodes)); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java index 9c61c5d5eeedd..70c33283c7475 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; import org.elasticsearch.test.ESTestCase; @@ -139,6 +140,12 @@ public void testToXContentWithDeprecatedClusterState() { } } ], + "nodes_features": [ + { + "node_id": "node0", + "features": [] + } + ], "metadata": { "cluster_uuid": "_na_", "cluster_uuid_committed": false, @@ -204,7 +211,7 @@ public void testToXContentWithDeprecatedClusterState() { clusterState.stateUUID(), clusterState.getNodes().get("node0").getEphemeralId(), Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, 
IndexVersion.current(), IndexVersion.current() ), @@ -291,7 +298,7 @@ private void assertXContent( ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, params); assertEquals(XContentHelper.stripWhitespace(expectedBody), XContentHelper.stripWhitespace(Strings.toString(builder))); } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } final var expectedChunks = Objects.equals(params.param("metric"), "none") diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java index e919b4aedf38b..25642c66f11a7 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.RandomQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchModule; @@ -54,7 +55,7 @@ public void testSerialization() throws Exception { clusterSearchShardsGroups[i] = new ClusterSearchShardsGroup(shardId, new ShardRouting[] { shardRouting }); DiscoveryNodeUtils.Builder node = DiscoveryNodeUtils.builder(shardRouting.currentNodeId()) .address(new TransportAddress(TransportAddress.META_ADDRESS, randomInt(0xFFFF))) - .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()); + .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()); nodes.add(node.build()); AliasFilter aliasFilter; if (randomBoolean()) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java index 3613718b9dc2f..49528c204b042 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/stats/VersionStatsTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; @@ -53,7 +54,7 @@ protected VersionStats mutateInstance(VersionStats instance) { return new VersionStats(instance.versionStats().stream().map(svs -> { return switch (randomIntBetween(1, 4)) { case 1 -> new VersionStats.SingleVersionStats( - IndexVersion.V_7_3_0, + IndexVersions.V_7_3_0, svs.indexCount, svs.primaryShardCount, svs.totalPrimaryByteCount @@ -88,12 +89,12 @@ public void testCreation() { metadata = new Metadata.Builder().put(indexMeta("foo", IndexVersion.current(), 4), true) .put(indexMeta("bar", IndexVersion.current(), 3), true) - .put(indexMeta("baz", IndexVersion.V_7_0_0, 2), true) + .put(indexMeta("baz", IndexVersions.V_7_0_0, 2), true) .build(); stats = VersionStats.of(metadata, Collections.emptyList()); assertThat(stats.versionStats().size(), equalTo(2)); 
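The mechanical change running through these test hunks is the relocation of historical version constants from IndexVersion onto the new IndexVersions holder; only current() and instance-level behaviour stay on IndexVersion. Schematically (a sketch, assuming VersionId's standard comparison helpers):

// Before: IndexVersion.V_7_0_0, IndexVersion.MINIMUM_COMPATIBLE, ...
// After:  the same constants live on IndexVersions.
IndexVersion oldest  = IndexVersions.MINIMUM_COMPATIBLE;
IndexVersion current = IndexVersion.current();
assert oldest.onOrBefore(current);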
VersionStats.SingleVersionStats s1 = new VersionStats.SingleVersionStats(IndexVersion.current(), 2, 7, 0); - VersionStats.SingleVersionStats s2 = new VersionStats.SingleVersionStats(IndexVersion.V_7_0_0, 1, 2, 0); + VersionStats.SingleVersionStats s2 = new VersionStats.SingleVersionStats(IndexVersions.V_7_0_0, 1, 2, 0); assertThat(stats.versionStats(), containsInAnyOrder(s1, s2)); ShardId shardId = new ShardId("bar", "uuid", 0); @@ -132,7 +133,7 @@ public void testCreation() { stats = VersionStats.of(metadata, Collections.singletonList(nodeResponse)); assertThat(stats.versionStats().size(), equalTo(2)); s1 = new VersionStats.SingleVersionStats(IndexVersion.current(), 2, 7, 100); - s2 = new VersionStats.SingleVersionStats(IndexVersion.V_7_0_0, 1, 2, 0); + s2 = new VersionStats.SingleVersionStats(IndexVersions.V_7_0_0, 1, 2, 0); assertThat(stats.versionStats(), containsInAnyOrder(s1, s2)); } @@ -141,7 +142,7 @@ private static IndexMetadata indexMeta(String name, IndexVersion version, int pr } public static VersionStats randomInstance() { - List versions = List.of(IndexVersion.current(), IndexVersion.V_7_0_0, IndexVersion.V_7_1_0, IndexVersion.V_7_2_0); + List versions = List.of(IndexVersion.current(), IndexVersions.V_7_0_0, IndexVersions.V_7_1_0, IndexVersions.V_7_2_0); List stats = new ArrayList<>(); for (IndexVersion v : versions) { VersionStats.SingleVersionStats s = new VersionStats.SingleVersionStats( diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java index 76db89c138682..e2c71f3b20084 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.index.VersionType; import org.elasticsearch.indices.EmptySystemIndices; @@ -111,7 +112,7 @@ public void setUp() throws Exception { DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("node") .version( VersionUtils.randomCompatibleVersion(random(), Version.CURRENT), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ) .build(); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java index 7e9556be18f6d..d4c5fc09e821f 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionTookTests.java @@ -29,7 +29,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.indices.EmptySystemIndices; import org.elasticsearch.tasks.Task; @@ -81,7 +81,7 @@ public void setUp() throws Exception { DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("node") .version( VersionUtils.randomCompatibleVersion(random(), Version.CURRENT), - 
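// Editor's note: the IndexVersion -> IndexVersions renames that dominate this part of the
// diff are a single mechanical migration: named version constants now live on a separate
// IndexVersions holder class, while IndexVersion keeps the instance methods and factories.
// The split, exactly as used throughout the hunks above and below:
IndexVersion oldest  = IndexVersions.MINIMUM_COMPATIBLE; // constants: moved to IndexVersions
IndexVersion v700    = IndexVersions.V_7_0_0;            //   "
IndexVersion current = IndexVersion.current();           // methods/factories: stay on IndexVersion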
IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ) .build(); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/RequestDispatcherTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/RequestDispatcherTests.java index ef1786892630a..75d5d7fb7c55d 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/RequestDispatcherTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/RequestDispatcherTests.java @@ -42,7 +42,7 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.index.shard.ShardId; @@ -113,7 +113,7 @@ public void testHappyCluster() throws Exception { Metadata.Builder metadata = Metadata.builder(); for (String index : allIndices) { metadata.put( - IndexMetadata.builder(index).settings(indexSettings(IndexVersion.MINIMUM_COMPATIBLE, between(1, 10), between(0, 2))) + IndexMetadata.builder(index).settings(indexSettings(IndexVersions.MINIMUM_COMPATIBLE, between(1, 10), between(0, 2))) ); } clusterState = newClusterState(metadata.build(), discoNodes.build()); @@ -182,7 +182,7 @@ public void testRetryThenOk() throws Exception { Metadata.Builder metadata = Metadata.builder(); for (String index : allIndices) { metadata.put( - IndexMetadata.builder(index).settings(indexSettings(IndexVersion.MINIMUM_COMPATIBLE, between(1, 10), between(1, 3))) + IndexMetadata.builder(index).settings(indexSettings(IndexVersions.MINIMUM_COMPATIBLE, between(1, 10), between(1, 3))) ); } clusterState = newClusterState(metadata.build(), discoNodes.build()); @@ -302,7 +302,7 @@ public void testRetryButFails() throws Exception { Metadata.Builder metadata = Metadata.builder(); for (String index : allIndices) { metadata.put( - IndexMetadata.builder(index).settings(indexSettings(IndexVersion.MINIMUM_COMPATIBLE, between(1, 10), between(0, 3))) + IndexMetadata.builder(index).settings(indexSettings(IndexVersions.MINIMUM_COMPATIBLE, between(1, 10), between(0, 3))) ); } clusterState = newClusterState(metadata.build(), discoNodes.build()); @@ -424,7 +424,7 @@ public void testSuccessWithAnyMatch() throws Exception { Metadata.Builder metadata = Metadata.builder(); for (String index : allIndices) { metadata.put( - IndexMetadata.builder(index).settings(indexSettings(IndexVersion.MINIMUM_COMPATIBLE, between(2, 10), between(0, 2))) + IndexMetadata.builder(index).settings(indexSettings(IndexVersions.MINIMUM_COMPATIBLE, between(2, 10), between(0, 2))) ); } clusterState = newClusterState(metadata.build(), discoNodes.build()); @@ -520,7 +520,7 @@ public void testStopAfterAllShardsUnmatched() throws Exception { Metadata.Builder metadata = Metadata.builder(); for (String index : allIndices) { metadata.put( - IndexMetadata.builder(index).settings(indexSettings(IndexVersion.MINIMUM_COMPATIBLE, between(1, 10), between(0, 2))) + IndexMetadata.builder(index).settings(indexSettings(IndexVersions.MINIMUM_COMPATIBLE, between(1, 10), between(0, 2))) ); } clusterState = newClusterState(metadata.build(), discoNodes.build()); diff --git a/server/src/test/java/org/elasticsearch/action/get/GetFromTranslogResponseSerializationTests.java 
b/server/src/test/java/org/elasticsearch/action/get/GetFromTranslogResponseSerializationTests.java index 4c1d3c14f4956..1a6cc70ba08ad 100644 --- a/server/src/test/java/org/elasticsearch/action/get/GetFromTranslogResponseSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/GetFromTranslogResponseSerializationTests.java @@ -24,26 +24,39 @@ protected Writeable.Reader instanceRead @Override protected TransportGetFromTranslogAction.Response createTestInstance() { - return new TransportGetFromTranslogAction.Response(randomGetResult(), randomSegmentGeneration()); + return new TransportGetFromTranslogAction.Response(randomGetResult(), randomPrimaryTerm(), randomSegmentGeneration()); } @Override protected TransportGetFromTranslogAction.Response mutateInstance(TransportGetFromTranslogAction.Response instance) throws IOException { - return randomBoolean() - ? new TransportGetFromTranslogAction.Response( - instance.getResult(), - randomValueOtherThan(instance.segmentGeneration(), this::randomSegmentGeneration) - ) - : new TransportGetFromTranslogAction.Response( + return switch (randomInt(2)) { + case 0 -> new TransportGetFromTranslogAction.Response( randomValueOtherThan(instance.getResult(), this::randomGetResult), + instance.primaryTerm(), + instance.segmentGeneration() + ); + case 1 -> new TransportGetFromTranslogAction.Response( + instance.getResult(), + randomValueOtherThan(instance.primaryTerm(), this::randomPrimaryTerm), instance.segmentGeneration() ); + case 2 -> new TransportGetFromTranslogAction.Response( + instance.getResult(), + instance.primaryTerm(), + randomValueOtherThan(instance.segmentGeneration(), this::randomSegmentGeneration) + ); + default -> randomValueOtherThan(instance, this::createTestInstance); + }; } private long randomSegmentGeneration() { return randomBoolean() ? -1L : randomNonNegativeLong(); } + private long randomPrimaryTerm() { + return randomNonNegativeLong(); + } + private GetResult randomGetResult() { return randomBoolean() ? null : GetResultTests.randomGetResult(randomFrom(XContentType.values())).v1(); } diff --git a/server/src/test/java/org/elasticsearch/action/get/ShardMultiGetFromTranslogResponseSerializationTests.java b/server/src/test/java/org/elasticsearch/action/get/ShardMultiGetFromTranslogResponseSerializationTests.java index c3f87ee9ce26d..402566394f61b 100644 --- a/server/src/test/java/org/elasticsearch/action/get/ShardMultiGetFromTranslogResponseSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/ShardMultiGetFromTranslogResponseSerializationTests.java @@ -28,26 +28,39 @@ protected Writeable.Reader instanceReader() { @Override protected Response createTestInstance() { - return new Response(randomMultiGetShardResponse(), randomSegmentGeneration()); + return new Response(randomMultiGetShardResponse(), randomPrimaryTerm(), randomSegmentGeneration()); } @Override protected Response mutateInstance(Response instance) throws IOException { - return randomBoolean() - ? 
new Response( - instance.multiGetShardResponse(), - randomValueOtherThan(instance.segmentGeneration(), this::randomSegmentGeneration) - ) - : new Response( + return switch (randomInt(2)) { + case 0 -> new Response( randomValueOtherThan(instance.multiGetShardResponse(), this::randomMultiGetShardResponse), + instance.primaryTerm(), + instance.segmentGeneration() + ); + case 1 -> new Response( + instance.multiGetShardResponse(), + randomValueOtherThan(instance.primaryTerm(), this::randomPrimaryTerm), instance.segmentGeneration() ); + case 2 -> new Response( + instance.multiGetShardResponse(), + instance.primaryTerm(), + randomValueOtherThan(instance.segmentGeneration(), this::randomSegmentGeneration) + ); + default -> randomValueOtherThan(instance, this::createTestInstance); + }; } private long randomSegmentGeneration() { return randomBoolean() ? -1L : randomNonNegativeLong(); } + private long randomPrimaryTerm() { + return randomNonNegativeLong(); + } + private GetResponse randomGetResponse() { return randomBoolean() ? null : new GetResponse(GetResultTests.randomGetResult(randomFrom(XContentType.values())).v1()); } diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 3af46aea79319..7166664181eb1 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -442,7 +442,7 @@ public void sendCanMatch( shardsIter, timeProvider, null, - true, + shardsIter.size() > shardToSkip.size(), EMPTY_CONTEXT_PROVIDER, ActionTestUtils.assertNoFailureListener(iter -> { result.set(iter); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchShardsResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchShardsResponseTests.java index 6d9f3dfaefcf0..8924625aedcdd 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchShardsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchShardsResponseTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.query.RandomQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; @@ -113,11 +114,11 @@ protected SearchShardsResponse mutateInstance(SearchShardsResponse r) throws IOE public void testLegacyResponse() { DiscoveryNode node1 = DiscoveryNodeUtils.builder("node-1") .address(new TransportAddress(TransportAddress.META_ADDRESS, randomInt(0xFFFF))) - .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); DiscoveryNode node2 = DiscoveryNodeUtils.builder("node-2") .address(new TransportAddress(TransportAddress.META_ADDRESS, randomInt(0xFFFF))) - .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(randomCompatibleVersion(random(), Version.CURRENT), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); final ClusterSearchShardsGroup[] groups = new 
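// Editor's note: both *FromTranslog* serialization tests above grow a primaryTerm field and
// adopt the standard mutateInstance template — mutate exactly one randomly chosen field and
// leave the rest intact — so equals/hashCode coverage stays exhaustive as fields are added.
// The template in isolation (Pair and its fields are hypothetical stand-ins):
record Pair(long primaryTerm, long segmentGeneration) {}

Pair mutate(Pair in) {
    return switch (randomInt(1)) {
        case 0 -> new Pair(randomValueOtherThan(in.primaryTerm(), ESTestCase::randomNonNegativeLong), in.segmentGeneration());
        case 1 -> new Pair(in.primaryTerm(), randomValueOtherThan(in.segmentGeneration(), ESTestCase::randomNonNegativeLong));
        default -> throw new AssertionError();
    };
}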
ClusterSearchShardsGroup[2]; { diff --git a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java index 2a0391a57eba6..d2acf8f397f2f 100644 --- a/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/SubscribableListenerTests.java @@ -494,7 +494,7 @@ private static void assertComplete(SubscribableListener listener, @Nulla try { listener.rawResult(); } catch (Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } else { assertEquals(expectedFailureMessage, expectThrows(ElasticsearchException.class, listener::rawResult).getMessage()); diff --git a/server/src/test/java/org/elasticsearch/action/support/ThreadedActionListenerTests.java b/server/src/test/java/org/elasticsearch/action/support/ThreadedActionListenerTests.java index eef1882676cfd..b5f07b7d8d087 100644 --- a/server/src/test/java/org/elasticsearch/action/support/ThreadedActionListenerTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/ThreadedActionListenerTests.java @@ -83,7 +83,7 @@ public void onFailure(Exception e) { e = elasticsearchException; assertNull(e.getCause()); } else { - throw new AssertionError("unexpected", e); + fail(e); } } @@ -91,7 +91,7 @@ public void onFailure(Exception e) { assertEquals("simulated", e.getMessage()); assertEquals(0, e.getSuppressed().length); } else { - throw new AssertionError("unexpected", e); + fail(e); } } diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java index 4f926b14a1904..25d9f9fec884b 100644 --- a/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/TransportWriteActionTests.java @@ -103,7 +103,7 @@ public void initCommonMocks() { indexShard = mock(IndexShard.class); location = mock(Translog.Location.class); clusterService = createClusterService(threadPool); - when(indexShard.refresh(any())).thenReturn(new Engine.RefreshResult(true, 1)); + when(indexShard.refresh(any())).thenReturn(new Engine.RefreshResult(true, randomNonNegativeLong(), 1)); ReplicationGroup replicationGroup = mock(ReplicationGroup.class); when(indexShard.getReplicationGroup()).thenReturn(replicationGroup); when(replicationGroup.getReplicationTargets()).thenReturn(Collections.emptyList()); @@ -199,7 +199,7 @@ public void testReplicaImmediateRefresh() throws Exception { verify(indexShard).externalRefresh(eq(PostWriteRefresh.FORCED_REFRESH_AFTER_INDEX), refreshListener.capture()); verify(indexShard, never()).addRefreshListener(any(), any()); // Fire the listener manually - refreshListener.getValue().onResponse(new Engine.RefreshResult(randomBoolean(), randomNonNegativeLong())); + refreshListener.getValue().onResponse(new Engine.RefreshResult(randomBoolean(), randomNonNegativeLong(), randomNonNegativeLong())); assertNotNull(listener.response); assertNull(listener.failure); } diff --git a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java index e92dd5f11f106..f140e624cc674 100644 --- a/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java +++ 
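// Editor's note: Engine.RefreshResult now takes three arguments at the TransportWriteAction
// call sites above. The diff never names the new middle parameter; given the primaryTerm
// plumbing added to the get-from-translog responses earlier in this PR it is plausibly the
// primary term of the refreshed shard — treat that reading as an assumption:
var result = new Engine.RefreshResult(
    true,                    // refreshed
    randomNonNegativeLong(), // assumed: primary term (new in this PR)
    1                        // generation
);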
b/server/src/test/java/org/elasticsearch/client/internal/ParentTaskAssigningClientTests.java @@ -36,6 +36,8 @@ protected void } }; try (ParentTaskAssigningClient client = new ParentTaskAssigningClient(mock, parentTaskId[0])) { + assertEquals(parentTaskId[0], client.getParentTask()); + // All of these should have the parentTaskId set client.bulk(new BulkRequest()); client.search(new SearchRequest()); diff --git a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java index 46c6d1db47a7c..56c82ae12dc45 100644 --- a/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/ClusterStateTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.health.metadata.HealthMetadata; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.SystemIndices; @@ -63,6 +64,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import static java.util.Collections.emptySet; @@ -223,6 +225,12 @@ public void testToXContent() throws IOException { } } ], + "nodes_features" : [ + { + "node_id" : "nodeId1", + "features" : [ "f1", "f2" ] + } + ], "metadata": { "cluster_uuid": "clusterUUID", "cluster_uuid_committed": false, @@ -373,7 +381,7 @@ public void testToXContent() throws IOException { }""", ephemeralId, Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), TransportVersion.current(), IndexVersion.current(), @@ -484,6 +492,15 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti } } ], + "nodes_features" : [ + { + "node_id" : "nodeId1", + "features" : [ + "f1", + "f2" + ] + } + ], "metadata" : { "cluster_uuid" : "clusterUUID", "cluster_uuid_committed" : false, @@ -630,7 +647,7 @@ public void testToXContent_FlatSettingTrue_ReduceMappingFalse() throws IOExcepti }""", ephemeralId, Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), TransportVersion.current(), IndexVersion.current(), @@ -741,6 +758,15 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti } } ], + "nodes_features" : [ + { + "node_id" : "nodeId1", + "features" : [ + "f1", + "f2" + ] + } + ], "metadata" : { "cluster_uuid" : "clusterUUID", "cluster_uuid_committed" : false, @@ -893,7 +919,7 @@ public void testToXContent_FlatSettingFalse_ReduceMappingTrue() throws IOExcepti }""", ephemeralId, Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), TransportVersion.current(), IndexVersion.current(), @@ -954,6 +980,7 @@ public void testToXContentSameTypeName() throws IOException { "blocks" : { }, "nodes" : { }, "nodes_versions" : [ ], + "nodes_features" : [ ], "metadata" : { "cluster_uuid" : "clusterUUID", "cluster_uuid_committed" : false, @@ -1015,6 +1042,46 @@ public void testToXContentSameTypeName() throws IOException { }""", IndexVersion.current()), Strings.toString(builder)); } + public void testNodeFeaturesSorted() throws IOException { + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodeFeatures(Map.of("node2", Set.of("nf1", "f2", "nf2"), "node1", Set.of("f3", "f2", 
"f1"), "node3", Set.of())) + .build(); + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + builder.startObject(); + writeChunks(clusterState, builder, new ToXContent.MapParams(Map.of("metric", ClusterState.Metric.NODES.toString()))); + builder.endObject(); + + assertThat(Strings.toString(builder), equalTo(""" + { + "cluster_uuid" : "_na_", + "nodes" : { }, + "nodes_versions" : [ ], + "nodes_features" : [ + { + "node_id" : "node1", + "features" : [ + "f1", + "f2", + "f3" + ] + }, + { + "node_id" : "node2", + "features" : [ + "f2", + "nf1", + "nf2" + ] + }, + { + "node_id" : "node3", + "features" : [ ] + } + ] + }""")); + } + private ClusterState buildClusterState() throws IOException { IndexMetadata indexMetadata = IndexMetadata.builder("index") .state(IndexMetadata.State.OPEN) @@ -1056,6 +1123,7 @@ private ClusterState buildClusterState() throws IOException { new CompatibilityVersions(TransportVersion.current(), Map.of(".tasks", new SystemIndexDescriptor.MappingsVersion(1, 1))) ) ) + .nodeFeatures(Map.of("nodeId1", Set.of("f1", "f2"))) .blocks( ClusterBlocks.builder() .addGlobalBlock( @@ -1256,9 +1324,11 @@ public static int expectedChunkCount(ToXContent.Params params, ClusterState clus chunkCount += 2 + clusterState.blocks().indices().size(); } - // nodes, nodes_versions + // nodes, nodes_versions, nodes_features if (metrics.contains(ClusterState.Metric.NODES)) { - chunkCount += 4 + clusterState.nodes().size() + clusterState.compatibilityVersions().size(); + chunkCount += 7 + clusterState.nodes().size() + clusterState.compatibilityVersions().size() + clusterState.clusterFeatures() + .nodeFeatures() + .size(); } // metadata diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java index 97a2e4f7ad35c..ee5b8b652d2d9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/FollowersCheckerTests.java @@ -839,7 +839,7 @@ public void handleResponse() { @Override public void handleException(TransportException exp) { - throw new AssertionError("unexpected", exp); + fail(exp); } public boolean succeeded() { diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java index 4877ece7712bd..50bbbad05a778 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinHelperTests.java @@ -91,7 +91,8 @@ public void testJoinDeduplication() { new NoneCircuitBreakerService(), Function.identity(), (listener, term) -> listener.onResponse(null), - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); transportService.start(); @@ -258,7 +259,8 @@ public void testJoinFailureOnUnhealthyNodes() { new NoneCircuitBreakerService(), Function.identity(), (listener, term) -> listener.onResponse(null), - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); transportService.start(); @@ -334,12 +336,13 @@ public void testLatestStoredStateFailure() { new NoneCircuitBreakerService(), Function.identity(), (listener, term) -> listener.onFailure(new ElasticsearchException("simulated")), - CompatibilityVersionsUtils.staticCurrent() + 
CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); final var joinAccumulator = joinHelper.new CandidateJoinAccumulator(); final var joinListener = new PlainActionFuture(); - joinAccumulator.handleJoinRequest(localNode, CompatibilityVersionsUtils.staticCurrent(), joinListener); + joinAccumulator.handleJoinRequest(localNode, CompatibilityVersionsUtils.staticCurrent(), Set.of(), joinListener); assert joinListener.isDone() == false; final var mockAppender = new MockLogAppender(); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LinearizabilityCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LinearizabilityCheckerTests.java index d6df8e26721c0..34fe7eae32fcb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LinearizabilityCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LinearizabilityCheckerTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.cluster.coordination.LinearizabilityChecker.History; import org.elasticsearch.cluster.coordination.LinearizabilityChecker.KeyedSpec; +import org.elasticsearch.cluster.coordination.LinearizabilityChecker.LinearizabilityCheckAborted; import org.elasticsearch.cluster.coordination.LinearizabilityChecker.SequentialSpec; import org.elasticsearch.core.Tuple; import org.elasticsearch.test.ESTestCase; @@ -58,7 +59,7 @@ public void testLockConsistent() { assertThat(lockSpec.nextState(true, null, true), equalTo(Optional.empty())); } - public void testLockWithLinearizableHistory1() { + public void testLockWithLinearizableHistory1() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(null); // 0: acquire lock history.respond(call0, true); // 0: lock acquisition succeeded @@ -67,7 +68,7 @@ public void testLockWithLinearizableHistory1() { assertTrue(LinearizabilityChecker.isLinearizable(lockSpec, history)); } - public void testLockWithLinearizableHistory2() { + public void testLockWithLinearizableHistory2() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(null); // 0: acquire lock int call1 = history.invoke(null); // 1: acquire lock @@ -76,7 +77,7 @@ public void testLockWithLinearizableHistory2() { assertTrue(LinearizabilityChecker.isLinearizable(lockSpec, history)); } - public void testLockWithLinearizableHistory3() { + public void testLockWithLinearizableHistory3() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(null); // 0: acquire lock int call1 = history.invoke(null); // 1: acquire lock @@ -85,7 +86,7 @@ public void testLockWithLinearizableHistory3() { assertTrue(LinearizabilityChecker.isLinearizable(lockSpec, history)); } - public void testLockWithNonLinearizableHistory() { + public void testLockWithNonLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(null); // 0: acquire lock history.respond(call0, false); // 0: lock acquisition failed @@ -127,7 +128,7 @@ public void testRegisterConsistent() { assertThat(registerSpec.nextState(7, null, 42), equalTo(Optional.empty())); } - public void testRegisterWithLinearizableHistory() { + public void testRegisterWithLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(42); // 0: invoke write 42 int call1 = history.invoke(null); // 1: invoke read @@ -135,14 +136,14 @@ public void 
testRegisterWithLinearizableHistory() { history.respond(call2, 0); // 2: read returns 0 history.respond(call1, 42); // 1: read returns 42 - expectThrows(IllegalArgumentException.class, () -> LinearizabilityChecker.isLinearizable(registerSpec, history)); + expectThrows(AssertionError.class, () -> LinearizabilityChecker.isLinearizable(registerSpec, history)); assertTrue(LinearizabilityChecker.isLinearizable(registerSpec, history, i -> null)); history.respond(call0, null); // 0: write returns assertTrue(LinearizabilityChecker.isLinearizable(registerSpec, history)); } - public void testRegisterWithNonLinearizableHistory() { + public void testRegisterWithNonLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(42); // 0: invoke write 42 int call1 = history.invoke(null); // 1: invoke read @@ -150,14 +151,14 @@ public void testRegisterWithNonLinearizableHistory() { int call2 = history.invoke(null); // 2: invoke read history.respond(call2, 0); // 2: read returns 0, not allowed - expectThrows(IllegalArgumentException.class, () -> LinearizabilityChecker.isLinearizable(registerSpec, history)); + expectThrows(AssertionError.class, () -> LinearizabilityChecker.isLinearizable(registerSpec, history)); assertFalse(LinearizabilityChecker.isLinearizable(registerSpec, history, i -> null)); history.respond(call0, null); // 0: write returns assertFalse(LinearizabilityChecker.isLinearizable(registerSpec, history)); } - public void testRegisterObservedSequenceOfUpdatesWitLinearizableHistory() { + public void testRegisterObservedSequenceOfUpdatesWitLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(42); // 0: invoke write 42 int call1 = history.invoke(43); // 1: invoke write 43 @@ -174,7 +175,7 @@ public void testRegisterObservedSequenceOfUpdatesWitLinearizableHistory() { assertTrue(LinearizabilityChecker.isLinearizable(registerSpec, history)); } - public void testRegisterObservedSequenceOfUpdatesWithNonLinearizableHistory() { + public void testRegisterObservedSequenceOfUpdatesWithNonLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int call0 = history.invoke(42); // 0: invoke write 42 int call1 = history.invoke(43); // 1: invoke write 43 @@ -214,7 +215,7 @@ public Optional nextState(Object currentState, Object input, Object outp } }; - public void testMultiRegisterWithLinearizableHistory() { + public void testMultiRegisterWithLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int callX0 = history.invoke(new Tuple<>("x", 42)); // 0: invoke write 42 on key x int callX1 = history.invoke(new Tuple<>("x", null)); // 1: invoke read on key x @@ -227,7 +228,7 @@ public void testMultiRegisterWithLinearizableHistory() { history.respond(callY1, 42); // 1: read returns 42 on key y history.respond(callX1, 42); // 1: read returns 42 on key x - expectThrows(IllegalArgumentException.class, () -> LinearizabilityChecker.isLinearizable(multiRegisterSpec, history)); + expectThrows(AssertionError.class, () -> LinearizabilityChecker.isLinearizable(multiRegisterSpec, history)); assertTrue(LinearizabilityChecker.isLinearizable(multiRegisterSpec, history, i -> null)); history.respond(callX0, null); // 0: write returns on key x @@ -235,7 +236,7 @@ public void testMultiRegisterWithLinearizableHistory() { assertTrue(LinearizabilityChecker.isLinearizable(multiRegisterSpec, history)); } - public void 
testMultiRegisterWithNonLinearizableHistory() { + public void testMultiRegisterWithNonLinearizableHistory() throws LinearizabilityCheckAborted { final History history = new History(); int callX0 = history.invoke(new Tuple<>("x", 42)); // 0: invoke write 42 on key x int callX1 = history.invoke(new Tuple<>("x", null)); // 1: invoke read on key x @@ -248,7 +249,7 @@ public void testMultiRegisterWithNonLinearizableHistory() { history.respond(callY2, 0); // 2: read returns 0 on key y, not allowed history.respond(callX1, 42); // 1: read returns 42 on key x - expectThrows(IllegalArgumentException.class, () -> LinearizabilityChecker.isLinearizable(multiRegisterSpec, history)); + expectThrows(AssertionError.class, () -> LinearizabilityChecker.isLinearizable(multiRegisterSpec, history)); assertFalse(LinearizabilityChecker.isLinearizable(multiRegisterSpec, history, i -> null)); history.respond(callX0, null); // 0: write returns on key x diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java index f779d5ea56dfa..7a79dc44c29fd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/MessagesTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; -import org.elasticsearch.test.EqualsHashCodeTestUtils.CopyFunction; import org.elasticsearch.test.TransportVersionUtils; import java.util.Map; @@ -36,59 +35,56 @@ public void testJoinEqualsHashCodeSerialization() { randomNonNegativeLong(), randomNonNegativeLong() ); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialJoin, - (CopyFunction) join -> copyWriteable(join, writableRegistry(), Join::new), - join -> { - return switch (randomInt(4)) { - case 0 -> - // change sourceNode - new Join( - createNode(randomAlphaOfLength(20)), - join.getTargetNode(), - join.getTerm(), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() - ); - case 1 -> - // change targetNode - new Join( - join.getSourceNode(), - createNode(randomAlphaOfLength(20)), - join.getTerm(), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() - ); - case 2 -> - // change term - new Join( - join.getSourceNode(), - join.getTargetNode(), - randomValueOtherThan(join.getTerm(), ESTestCase::randomNonNegativeLong), - join.getLastAcceptedTerm(), - join.getLastAcceptedVersion() - ); - case 3 -> - // change last accepted term - new Join( - join.getSourceNode(), - join.getTargetNode(), - join.getTerm(), - randomValueOtherThan(join.getLastAcceptedTerm(), ESTestCase::randomNonNegativeLong), - join.getLastAcceptedVersion() - ); - case 4 -> - // change version - new Join( - join.getSourceNode(), - join.getTargetNode(), - join.getTerm(), - join.getLastAcceptedTerm(), - randomValueOtherThan(join.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong) - ); - default -> throw new AssertionError(); - }; + join -> copyWriteable(join, writableRegistry(), Join::new), + join -> switch (randomInt(4)) { + case 0 -> + // change sourceNode + new Join( + createNode(randomAlphaOfLength(20)), + join.getTargetNode(), + join.getTerm(), + join.getLastAcceptedTerm(), + join.getLastAcceptedVersion() + ); + case 1 -> + // change targetNode + new Join( + 
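// Editor's note: the LinearizabilityCheckerTests changes above adjust two failure modes:
// checking a history that still has pending (unresponded) invocations now trips an
// assertion (AssertionError) instead of throwing IllegalArgumentException, and the check
// itself can abort via a new checked LinearizabilityCheckAborted, which every test method
// now declares. Call-site shape as implied by the tests:
public void testHistoryIsLinearizable() throws LinearizabilityChecker.LinearizabilityCheckAborted {
    // the third argument supplies responses for still-pending calls; without it, a pending
    // invocation is treated as a programming error and fails the test outright
    assertTrue(LinearizabilityChecker.isLinearizable(registerSpec, history, i -> null));
}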
join.getSourceNode(), + createNode(randomAlphaOfLength(20)), + join.getTerm(), + join.getLastAcceptedTerm(), + join.getLastAcceptedVersion() + ); + case 2 -> + // change term + new Join( + join.getSourceNode(), + join.getTargetNode(), + randomValueOtherThan(join.getTerm(), ESTestCase::randomNonNegativeLong), + join.getLastAcceptedTerm(), + join.getLastAcceptedVersion() + ); + case 3 -> + // change last accepted term + new Join( + join.getSourceNode(), + join.getTargetNode(), + join.getTerm(), + randomValueOtherThan(join.getLastAcceptedTerm(), ESTestCase::randomNonNegativeLong), + join.getLastAcceptedVersion() + ); + case 4 -> + // change version + new Join( + join.getSourceNode(), + join.getTargetNode(), + join.getTerm(), + join.getLastAcceptedTerm(), + randomValueOtherThan(join.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong) + ); + default -> throw new AssertionError(); } ); } @@ -104,26 +100,23 @@ public void testPublishRequestEqualsHashCode() { public void testPublishResponseEqualsHashCodeSerialization() { PublishResponse initialPublishResponse = new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong()); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialPublishResponse, - (CopyFunction) publishResponse -> copyWriteable(publishResponse, writableRegistry(), PublishResponse::new), - publishResponse -> { - return switch (randomInt(1)) { - case 0 -> - // change term - new PublishResponse( - randomValueOtherThan(publishResponse.getTerm(), ESTestCase::randomNonNegativeLong), - publishResponse.getVersion() - ); - case 1 -> - // change version - new PublishResponse( - publishResponse.getTerm(), - randomValueOtherThan(publishResponse.getVersion(), ESTestCase::randomNonNegativeLong) - ); - default -> throw new AssertionError(); - }; + publishResponse -> copyWriteable(publishResponse, writableRegistry(), PublishResponse::new), + publishResponse -> switch (randomInt(1)) { + case 0 -> + // change term + new PublishResponse( + randomValueOtherThan(publishResponse.getTerm(), ESTestCase::randomNonNegativeLong), + publishResponse.getVersion() + ); + case 1 -> + // change version + new PublishResponse( + publishResponse.getTerm(), + randomValueOtherThan(publishResponse.getVersion(), ESTestCase::randomNonNegativeLong) + ); + default -> throw new AssertionError(); } ); } @@ -141,61 +134,51 @@ public void testPublishWithJoinResponseEqualsHashCodeSerialization() { initialPublishResponse, randomBoolean() ? 
Optional.empty() : Optional.of(initialJoin) ); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialPublishWithJoinResponse, - (CopyFunction) publishWithJoinResponse -> copyWriteable( - publishWithJoinResponse, - writableRegistry(), - PublishWithJoinResponse::new - ), - publishWithJoinResponse -> { - switch (randomInt(1)) { - case 0: - // change publish response - return new PublishWithJoinResponse( - new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong()), - publishWithJoinResponse.getJoin() - ); - case 1: - // change optional join - Join newJoin = new Join( - createNode(randomAlphaOfLength(10)), - createNode(randomAlphaOfLength(10)), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong() - ); - return new PublishWithJoinResponse( - publishWithJoinResponse.getPublishResponse(), - publishWithJoinResponse.getJoin().isPresent() && randomBoolean() ? Optional.empty() : Optional.of(newJoin) - ); - default: - throw new AssertionError(); + publishWithJoinResponse -> copyWriteable(publishWithJoinResponse, writableRegistry(), PublishWithJoinResponse::new), + publishWithJoinResponse -> switch (randomInt(1)) { + case 0 -> + // change publish response + new PublishWithJoinResponse( + new PublishResponse(randomNonNegativeLong(), randomNonNegativeLong()), + publishWithJoinResponse.getJoin() + ); + case 1 -> { + // change optional join + Join newJoin = new Join( + createNode(randomAlphaOfLength(10)), + createNode(randomAlphaOfLength(10)), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong() + ); + yield new PublishWithJoinResponse( + publishWithJoinResponse.getPublishResponse(), + publishWithJoinResponse.getJoin().isPresent() && randomBoolean() ? 
Optional.empty() : Optional.of(newJoin) + ); } + default -> throw new AssertionError(); } ); } public void testStartJoinRequestEqualsHashCodeSerialization() { StartJoinRequest initialStartJoinRequest = new StartJoinRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong()); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialStartJoinRequest, - (CopyFunction) startJoinRequest -> copyWriteable(startJoinRequest, writableRegistry(), StartJoinRequest::new), - startJoinRequest -> { - return switch (randomInt(1)) { - case 0 -> - // change sourceNode - new StartJoinRequest(createNode(randomAlphaOfLength(20)), startJoinRequest.getTerm()); - case 1 -> - // change term - new StartJoinRequest( - startJoinRequest.getSourceNode(), - randomValueOtherThan(startJoinRequest.getTerm(), ESTestCase::randomNonNegativeLong) - ); - default -> throw new AssertionError(); - }; + startJoinRequest -> copyWriteable(startJoinRequest, writableRegistry(), StartJoinRequest::new), + startJoinRequest -> switch (randomInt(1)) { + case 0 -> + // change sourceNode + new StartJoinRequest(createNode(randomAlphaOfLength(20)), startJoinRequest.getTerm()); + case 1 -> + // change term + new StartJoinRequest( + startJoinRequest.getSourceNode(), + randomValueOtherThan(startJoinRequest.getTerm(), ESTestCase::randomNonNegativeLong) + ); + default -> throw new AssertionError(); } ); } @@ -206,31 +189,28 @@ public void testApplyCommitEqualsHashCodeSerialization() { randomNonNegativeLong(), randomNonNegativeLong() ); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialApplyCommit, - (CopyFunction) applyCommit -> copyWriteable(applyCommit, writableRegistry(), ApplyCommitRequest::new), - applyCommit -> { - return switch (randomInt(2)) { - case 0 -> - // change sourceNode - new ApplyCommitRequest(createNode(randomAlphaOfLength(20)), applyCommit.getTerm(), applyCommit.getVersion()); - case 1 -> - // change term - new ApplyCommitRequest( - applyCommit.getSourceNode(), - randomValueOtherThan(applyCommit.getTerm(), ESTestCase::randomNonNegativeLong), - applyCommit.getVersion() - ); - case 2 -> - // change version - new ApplyCommitRequest( - applyCommit.getSourceNode(), - applyCommit.getTerm(), - randomValueOtherThan(applyCommit.getVersion(), ESTestCase::randomNonNegativeLong) - ); - default -> throw new AssertionError(); - }; + applyCommit -> copyWriteable(applyCommit, writableRegistry(), ApplyCommitRequest::new), + applyCommit -> switch (randomInt(2)) { + case 0 -> + // change sourceNode + new ApplyCommitRequest(createNode(randomAlphaOfLength(20)), applyCommit.getTerm(), applyCommit.getVersion()); + case 1 -> + // change term + new ApplyCommitRequest( + applyCommit.getSourceNode(), + randomValueOtherThan(applyCommit.getTerm(), ESTestCase::randomNonNegativeLong), + applyCommit.getVersion() + ); + case 2 -> + // change version + new ApplyCommitRequest( + applyCommit.getSourceNode(), + applyCommit.getTerm(), + randomValueOtherThan(applyCommit.getVersion(), ESTestCase::randomNonNegativeLong) + ); + default -> throw new AssertionError(); } ); } @@ -246,61 +226,66 @@ public void testJoinRequestEqualsHashCodeSerialization() { JoinRequest initialJoinRequest = new JoinRequest( initialJoin.getSourceNode(), CompatibilityVersionsUtils.fakeSystemIndicesRandom(), + 
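// Editor's note: the MessagesTests rewrite running through these hunks is a mechanical
// modernisation — the explicit CopyFunction casts (an old Eclipse type-inference
// workaround, per the deleted comments) are dropped, and the statement switches become
// switch expressions. The only new construct is yield for multi-statement arms:
String label = switch (randomInt(1)) {      // randomInt from ESTestCase
    case 0 -> "single-expression arm";      // arrow arm returns the expression's value
    case 1 -> {
        String prefix = "multi-statement";  // block arms must yield their value
        yield prefix + " arm";
    }
    default -> throw new AssertionError();  // keeps the switch exhaustive over int
};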
Set.of(generateRandomStringArray(10, 10, false)), randomNonNegativeLong(), randomBoolean() ? Optional.empty() : Optional.of(initialJoin) ); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialJoinRequest, - (CopyFunction) joinRequest -> copyWriteable(joinRequest, writableRegistry(), JoinRequest::new), - joinRequest -> { - if (randomBoolean() && joinRequest.getOptionalJoin().isPresent() == false) { - return new JoinRequest( + joinRequest -> copyWriteable(joinRequest, writableRegistry(), JoinRequest::new), + joinRequest -> switch (randomInt(4)) { + case 0 -> { + assumeTrue("Optional join needs to be empty", joinRequest.getOptionalJoin().isEmpty()); + yield new JoinRequest( createNode(randomAlphaOfLength(10)), joinRequest.getCompatibilityVersions(), + joinRequest.getFeatures(), joinRequest.getMinimumTerm(), joinRequest.getOptionalJoin() ); - } else if (randomBoolean()) { - return new JoinRequest( - joinRequest.getSourceNode(), - new CompatibilityVersions( - TransportVersionUtils.randomVersion(Set.of(joinRequest.getCompatibilityVersions().transportVersion())), - Map.of() - ), - joinRequest.getMinimumTerm(), - joinRequest.getOptionalJoin() - ); - } else if (randomBoolean()) { - return new JoinRequest( + } + case 1 -> new JoinRequest( + joinRequest.getSourceNode(), + new CompatibilityVersions( + TransportVersionUtils.randomVersion(Set.of(joinRequest.getCompatibilityVersions().transportVersion())), + Map.of() + ), + joinRequest.getFeatures(), + joinRequest.getMinimumTerm(), + joinRequest.getOptionalJoin() + ); + case 2 -> new JoinRequest( + joinRequest.getSourceNode(), + joinRequest.getCompatibilityVersions(), + Set.of(generateRandomStringArray(10, 10, false)), + joinRequest.getMinimumTerm(), + joinRequest.getOptionalJoin() + ); + case 3 -> new JoinRequest( + joinRequest.getSourceNode(), + joinRequest.getCompatibilityVersions(), + joinRequest.getFeatures(), + randomValueOtherThan(joinRequest.getMinimumTerm(), ESTestCase::randomNonNegativeLong), + joinRequest.getOptionalJoin() + ); + case 4 -> { + // change OptionalJoin + Join newJoin = new Join( joinRequest.getSourceNode(), - joinRequest.getCompatibilityVersions(), - randomValueOtherThan(joinRequest.getMinimumTerm(), ESTestCase::randomNonNegativeLong), - joinRequest.getOptionalJoin() + createNode(randomAlphaOfLength(10)), + randomNonNegativeLong(), + randomNonNegativeLong(), + randomNonNegativeLong() ); - } else { - // change OptionalJoin - final Optional newOptionalJoin; - if (joinRequest.getOptionalJoin().isPresent() && randomBoolean()) { - newOptionalJoin = Optional.empty(); - } else { - newOptionalJoin = Optional.of( - new Join( - joinRequest.getSourceNode(), - createNode(randomAlphaOfLength(10)), - randomNonNegativeLong(), - randomNonNegativeLong(), - randomNonNegativeLong() - ) - ); - } - return new JoinRequest( + yield new JoinRequest( joinRequest.getSourceNode(), joinRequest.getCompatibilityVersions(), + joinRequest.getFeatures(), joinRequest.getMinimumTerm(), - newOptionalJoin + joinRequest.getOptionalJoin().isPresent() && randomBoolean() ? 
Optional.empty() : Optional.of(newJoin) ); } + default -> throw new AssertionError(); } ); } @@ -318,16 +303,13 @@ public ClusterState randomClusterState() { public void testPreVoteRequestEqualsHashCodeSerialization() { PreVoteRequest initialPreVoteRequest = new PreVoteRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong()); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialPreVoteRequest, - (CopyFunction) preVoteRequest -> copyWriteable(preVoteRequest, writableRegistry(), PreVoteRequest::new), - preVoteRequest -> { - if (randomBoolean()) { - return new PreVoteRequest(createNode(randomAlphaOfLength(10)), preVoteRequest.getCurrentTerm()); - } else { - return new PreVoteRequest(preVoteRequest.getSourceNode(), randomNonNegativeLong()); - } + preVoteRequest -> copyWriteable(preVoteRequest, writableRegistry(), PreVoteRequest::new), + preVoteRequest -> switch (randomInt(1)) { + case 0 -> new PreVoteRequest(createNode(randomAlphaOfLength(10)), preVoteRequest.getCurrentTerm()); + case 1 -> new PreVoteRequest(preVoteRequest.getSourceNode(), randomNonNegativeLong()); + default -> throw new AssertionError(); } ); } @@ -339,41 +321,38 @@ public void testPreVoteResponseEqualsHashCodeSerialization() { randomLongBetween(1, currentTerm), randomNonNegativeLong() ); - // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type EqualsHashCodeTestUtils.checkEqualsAndHashCode( initialPreVoteResponse, - (CopyFunction) preVoteResponse -> copyWriteable(preVoteResponse, writableRegistry(), PreVoteResponse::new), - preVoteResponse -> { - switch (randomInt(2)) { - case 0: - assumeTrue("last-accepted term is Long.MAX_VALUE", preVoteResponse.getLastAcceptedTerm() < Long.MAX_VALUE); - return new PreVoteResponse( - randomValueOtherThan( - preVoteResponse.getCurrentTerm(), - () -> randomLongBetween(preVoteResponse.getLastAcceptedTerm(), Long.MAX_VALUE) - ), - preVoteResponse.getLastAcceptedTerm(), - preVoteResponse.getLastAcceptedVersion() - ); - case 1: - assumeTrue("current term is 1", 1 < preVoteResponse.getCurrentTerm()); - return new PreVoteResponse( - preVoteResponse.getCurrentTerm(), - randomValueOtherThan( - preVoteResponse.getLastAcceptedTerm(), - () -> randomLongBetween(1, preVoteResponse.getCurrentTerm()) - ), - preVoteResponse.getLastAcceptedVersion() - ); - case 2: - return new PreVoteResponse( + preVoteResponse -> copyWriteable(preVoteResponse, writableRegistry(), PreVoteResponse::new), + preVoteResponse -> switch (randomInt(2)) { + case 0 -> { + assumeTrue("last-accepted term is Long.MAX_VALUE", preVoteResponse.getLastAcceptedTerm() < Long.MAX_VALUE); + yield new PreVoteResponse( + randomValueOtherThan( preVoteResponse.getCurrentTerm(), + () -> randomLongBetween(preVoteResponse.getLastAcceptedTerm(), Long.MAX_VALUE) + ), + preVoteResponse.getLastAcceptedTerm(), + preVoteResponse.getLastAcceptedVersion() + ); + } + case 1 -> { + assumeTrue("current term is 1", 1 < preVoteResponse.getCurrentTerm()); + yield new PreVoteResponse( + preVoteResponse.getCurrentTerm(), + randomValueOtherThan( preVoteResponse.getLastAcceptedTerm(), - randomValueOtherThan(preVoteResponse.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong) - ); - default: - throw new AssertionError(); + () -> randomLongBetween(1, preVoteResponse.getCurrentTerm()) + ), + preVoteResponse.getLastAcceptedVersion() + ); } + case 2 -> new 
PreVoteResponse( + preVoteResponse.getCurrentTerm(), + preVoteResponse.getLastAcceptedTerm(), + randomValueOtherThan(preVoteResponse.getLastAcceptedVersion(), ESTestCase::randomNonNegativeLong) + ); + default -> throw new AssertionError(); } ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java index 0d64e6ec20b7f..559c0a3628059 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinExecutorTests.java @@ -32,7 +32,9 @@ import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -43,6 +45,7 @@ import java.util.Collections; import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; @@ -82,12 +85,12 @@ public void testPreventJoinClusterWithNewerIndices() { .build(); metaBuilder.put(indexMetadata, false); Metadata metadata = metaBuilder.build(); - NodeJoinExecutor.ensureIndexCompatibility(IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata); + NodeJoinExecutor.ensureIndexCompatibility(IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata); expectThrows( IllegalStateException.class, () -> NodeJoinExecutor.ensureIndexCompatibility( - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion(IndexVersion.current()), metadata ) @@ -106,7 +109,7 @@ public void testPreventJoinClusterWithUnsupportedIndices() { Metadata metadata = metaBuilder.build(); expectThrows( IllegalStateException.class, - () -> NodeJoinExecutor.ensureIndexCompatibility(IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata) + () -> NodeJoinExecutor.ensureIndexCompatibility(IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata) ); } @@ -114,11 +117,13 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); final Version version = randomCompatibleVersion(random(), Version.CURRENT); builder.add( - DiscoveryNodeUtils.builder(UUIDs.base64UUID()).version(version, IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()).build() + DiscoveryNodeUtils.builder(UUIDs.base64UUID()) + .version(version, IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) + .build() ); builder.add( DiscoveryNodeUtils.builder(UUIDs.base64UUID()) - .version(randomCompatibleVersion(random(), version), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(randomCompatibleVersion(random(), version), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build() ); DiscoveryNodes nodes = builder.build(); @@ -169,7 +174,7 @@ public void testSuccess() { .build(); metaBuilder.put(indexMetadata, false); Metadata metadata = metaBuilder.build(); - NodeJoinExecutor.ensureIndexCompatibility(IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata); + NodeJoinExecutor.ensureIndexCompatibility(IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), metadata); } 
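// Editor's note: the ensureIndexCompatibility calls above all encode one rule — every index
// in the metadata must fall inside the joining node's supported index-version range
// [minimum, current], otherwise the join is rejected with IllegalStateException. A hedged
// per-index sketch of that range check, not the production method body:
static void checkIndexCompatibility(IndexVersion minSupported, IndexVersion maxSupported, IndexVersion indexCreated) {
    if (indexCreated.after(maxSupported) || indexCreated.before(minSupported)) {
        throw new IllegalStateException(
            "index created on [" + indexCreated + "] is outside the supported range [" + minSupported + "-" + maxSupported + "]"
        );
    }
}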
public static Settings.Builder randomCompatibleVersionSettings() { @@ -221,7 +226,16 @@ public void testUpdatesNodeWithNewRoles() throws Exception { final var resultingState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( clusterState, executor, - List.of(JoinTask.singleNode(actualNode, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, NOT_COMPLETED_LISTENER, 0L)) + List.of( + JoinTask.singleNode( + actualNode, + CompatibilityVersionsUtils.staticCurrent(), + Set.of(), + TEST_REASON, + NOT_COMPLETED_LISTENER, + 0L + ) + ) ); assertThat(resultingState.getNodes().get(actualNode.getId()).getRoles(), equalTo(actualNode.getRoles())); @@ -255,6 +269,7 @@ public void testRejectsStatesWithStaleTerm() { JoinTask.singleNode( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -266,6 +281,7 @@ public void testRejectsStatesWithStaleTerm() { new JoinTask.NodeJoinTask( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -317,6 +333,7 @@ public void testRejectsStatesWithOtherMaster() { JoinTask.singleNode( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -328,6 +345,7 @@ public void testRejectsStatesWithOtherMaster() { new JoinTask.NodeJoinTask( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -370,6 +388,7 @@ public void testRejectsStatesWithNoMasterIfNotBecomingMaster() { JoinTask.singleNode( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -418,12 +437,14 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { new JoinTask.NodeJoinTask( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ), new JoinTask.NodeJoinTask( otherNodeNew, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -450,6 +471,7 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { JoinTask.singleNode( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -457,6 +479,7 @@ public void testRemovesOlderNodeInstancesWhenBecomingMaster() throws Exception { JoinTask.singleNode( otherNodeOld, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -513,12 +536,14 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { new JoinTask.NodeJoinTask( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ), new JoinTask.NodeJoinTask( otherNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -537,6 +562,7 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { new JoinTask.NodeJoinTask( masterNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -552,6 +578,7 @@ public void testUpdatesVotingConfigExclusionsIfNeeded() throws Exception { JoinTask.singleNode( otherNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, executorTerm @@ -621,7 +648,16 @@ public void testDesiredNodesMembershipIsUpgradedWhenNewNodesJoin() throws Except final var desiredNodes = DesiredNodes.latestFromClusterState(clusterState); var 
tasks = joiningNodes.stream() - .map(node -> JoinTask.singleNode(node, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, NOT_COMPLETED_LISTENER, 0L)) + .map( + node -> JoinTask.singleNode( + node, + CompatibilityVersionsUtils.staticCurrent(), + Set.of(), + TEST_REASON, + NOT_COMPLETED_LISTENER, + 0L + ) + ) .toList(); final var updatedClusterState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful(clusterState, executor, tasks); @@ -658,7 +694,13 @@ public void testDesiredNodesMembershipIsUpgradedWhenANewMasterIsElected() throws clusterState.nodes() .stream() .map( - node -> new JoinTask.NodeJoinTask(node, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, NOT_COMPLETED_LISTENER) + node -> new JoinTask.NodeJoinTask( + node, + CompatibilityVersionsUtils.staticCurrent(), + Set.of(), + TEST_REASON, + NOT_COMPLETED_LISTENER + ) ), 1L ); @@ -713,7 +755,7 @@ public void testPerNodeLogging() { .createTaskQueue("test", Priority.NORMAL, executor) .submitTask( "test", - JoinTask.singleNode(node1, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, future, 0L), + JoinTask.singleNode(node1, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, future, 0L), null ), 10, @@ -742,7 +784,14 @@ public void testPerNodeLogging() { .createTaskQueue("test", Priority.NORMAL, executor) .submitTask( "test", - JoinTask.singleNode(node2, CompatibilityVersionsUtils.staticCurrent(), testReasonWithLink, future, 0L), + JoinTask.singleNode( + node2, + CompatibilityVersionsUtils.staticCurrent(), + Set.of(), + testReasonWithLink, + future, + 0L + ), null ), 10, @@ -782,6 +831,7 @@ public void testResetsNodeLeftGenerationOnNewTerm() throws Exception { new JoinTask.NodeJoinTask( otherNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER ) @@ -795,6 +845,44 @@ public void testResetsNodeLeftGenerationOnNewTerm() throws Exception { assertEquals(0L, resultingState.nodes().getNodeLeftGeneration()); } + public void testSetsNodeFeaturesWhenRejoining() throws Exception { + final AllocationService allocationService = createAllocationService(); + final RerouteService rerouteService = (reason, priority, listener) -> listener.onResponse(null); + + final NodeJoinExecutor executor = new NodeJoinExecutor(allocationService, rerouteService); + + final DiscoveryNode masterNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + + final DiscoveryNode rejoinNode = DiscoveryNodeUtils.create(UUIDs.base64UUID()); + final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes( + DiscoveryNodes.builder().add(masterNode).localNodeId(masterNode.getId()).masterNodeId(masterNode.getId()).add(rejoinNode) + ) + .nodeFeatures(Map.of(masterNode.getId(), Set.of("f1", "f2"), rejoinNode.getId(), Set.of())) + .build(); + + assertThat(clusterState.clusterFeatures().clusterHasFeature(new NodeFeature("f1")), is(false)); + assertThat(clusterState.clusterFeatures().clusterHasFeature(new NodeFeature("f2")), is(false)); + + final var resultingState = ClusterStateTaskExecutorUtils.executeAndAssertSuccessful( + clusterState, + executor, + List.of( + JoinTask.singleNode( + rejoinNode, + CompatibilityVersionsUtils.staticCurrent(), + Set.of("f1", "f2"), + TEST_REASON, + NOT_COMPLETED_LISTENER, + 0L + ) + ) + ); + + assertThat(resultingState.clusterFeatures().clusterHasFeature(new NodeFeature("f1")), is(true)); + assertThat(resultingState.clusterFeatures().clusterHasFeature(new NodeFeature("f2")), is(true)); + } + private DesiredNodeWithStatus 
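// Editor's note: testSetsNodeFeaturesWhenRejoining above captures the core semantics of the
// new node-features plumbing — a cluster only "has" a feature once every node reports it,
// which is why the rejoining node's empty set masks f1/f2 until it rejoins carrying them.
// The effective rule, as a hedged sketch (ClusterFeatures' real implementation may differ):
static boolean clusterHasFeature(Map<String, Set<String>> nodeFeatures, String feature) {
    return nodeFeatures.isEmpty() == false
        && nodeFeatures.values().stream().allMatch(features -> features.contains(feature));
}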
createActualizedDesiredNode() { return new DesiredNodeWithStatus(randomDesiredNode(), DesiredNodeWithStatus.Status.ACTUALIZED); } @@ -805,9 +893,17 @@ private DesiredNodeWithStatus createPendingDesiredNode() { private static JoinTask createRandomTask(DiscoveryNode node, long term) { return randomBoolean() - ? JoinTask.singleNode(node, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, NOT_COMPLETED_LISTENER, term) + ? JoinTask.singleNode(node, CompatibilityVersionsUtils.staticCurrent(), Set.of(), TEST_REASON, NOT_COMPLETED_LISTENER, term) : JoinTask.completingElection( - Stream.of(new JoinTask.NodeJoinTask(node, CompatibilityVersionsUtils.staticCurrent(), TEST_REASON, NOT_COMPLETED_LISTENER)), + Stream.of( + new JoinTask.NodeJoinTask( + node, + CompatibilityVersionsUtils.staticCurrent(), + Set.of(), + TEST_REASON, + NOT_COMPLETED_LISTENER + ) + ), term ); } diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java index a068029667eb2..e9e5b1c5338df 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeJoinTests.java @@ -230,7 +230,8 @@ protected void onSendRequest(long requestId, String action, TransportRequest req new Reconfigurator(Settings.EMPTY, clusterSettings), LeaderHeartbeatService.NO_OP, StatefulPreVoteCollector::new, - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); transportService.start(); transportService.acceptIncomingRequests(); @@ -318,7 +319,7 @@ public void testJoinWithHigherTermElectsLeader() { assertNull(coordinator.getStateForMasterService().nodes().getMasterNodeId()); long newTerm = initialTerm + randomLongBetween(1, 10); Future fut = joinNodeAsync( - new JoinRequest(node1, version1, newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, initialVersion))) + new JoinRequest(node1, version1, Set.of(), newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, initialVersion))) ); assertEquals(Coordinator.Mode.LEADER, coordinator.getMode()); assertNull(coordinator.getStateForMasterService().nodes().getMasterNodeId()); @@ -345,7 +346,13 @@ public void testJoinWithHigherTermButBetterStateGetsRejected() { expectThrows( CoordinationStateRejectedException.class, () -> joinNodeAndRun( - new JoinRequest(node1, version1, newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, higherVersion))) + new JoinRequest( + node1, + version1, + Set.of(), + newTerm, + Optional.of(new Join(node1, node0, newTerm, initialTerm, higherVersion)) + ) ) ); assertFalse(isLocalNodeElectedMaster()); @@ -368,6 +375,7 @@ public void testJoinWithHigherTermButBetterStateStillElectsMasterThroughSelfJoin new JoinRequest( node1, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, higherVersion)) ) @@ -391,6 +399,7 @@ public void testJoinElectedLeader() { new JoinRequest( node0, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node0, node0, newTerm, initialTerm, initialVersion)) ) @@ -401,6 +410,7 @@ public void testJoinElectedLeader() { new JoinRequest( node1, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, initialVersion)) ) @@ -425,6 +435,7 @@ public void testJoinElectedLeaderWithHigherTerm() { new JoinRequest( node0, 
CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node0, node0, newTerm, initialTerm, initialVersion)) ) @@ -432,7 +443,7 @@ public void testJoinElectedLeaderWithHigherTerm() { assertTrue(isLocalNodeElectedMaster()); long newerTerm = newTerm + randomLongBetween(1, 10); - joinNodeAndRun(new JoinRequest(node1, CompatibilityVersionsUtils.staticCurrent(), newerTerm, Optional.empty())); + joinNodeAndRun(new JoinRequest(node1, CompatibilityVersionsUtils.staticCurrent(), Set.of(), newerTerm, Optional.empty())); assertThat(coordinator.getCurrentTerm(), greaterThanOrEqualTo(newerTerm)); assertTrue(isLocalNodeElectedMaster()); } @@ -454,6 +465,7 @@ public void testJoinAccumulation() { new JoinRequest( node0, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node0, node0, newTerm, initialTerm, initialVersion)) ) @@ -465,6 +477,7 @@ public void testJoinAccumulation() { new JoinRequest( node1, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node1, node0, newTerm, initialTerm, initialVersion)) ) @@ -476,6 +489,7 @@ public void testJoinAccumulation() { new JoinRequest( node2, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node2, node0, newTerm, initialTerm, initialVersion)) ) @@ -505,6 +519,7 @@ public void testJoinFollowerWithHigherTerm() throws Exception { new JoinRequest( node1, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newerTerm, Optional.of(new Join(node1, node0, newerTerm, initialTerm, initialVersion)) ) @@ -539,6 +554,7 @@ public void testJoinUpdateVotingConfigExclusion() throws Exception { new JoinRequest( knownJoiningNode, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), initialTerm, Optional.of(new Join(knownJoiningNode, initialNode, newerTerm, initialTerm, initialVersion)) ) @@ -627,7 +643,7 @@ public void testJoinFollowerFails() throws Exception { assertThat( expectThrows( CoordinationStateRejectedException.class, - () -> joinNodeAndRun(new JoinRequest(node1, version1, newTerm, Optional.empty())) + () -> joinNodeAndRun(new JoinRequest(node1, version1, Set.of(), newTerm, Optional.empty())) ).getMessage(), containsString("join target is a follower") ); @@ -647,7 +663,7 @@ public void testBecomeFollowerFailsPendingJoin() throws Exception { ); long newTerm = initialTerm + randomLongBetween(1, 10); Future fut = joinNodeAsync( - new JoinRequest(node0, version0, newTerm, Optional.of(new Join(node0, node0, newTerm, initialTerm, initialVersion))) + new JoinRequest(node0, version0, Set.of(), newTerm, Optional.of(new Join(node0, node0, newTerm, initialTerm, initialVersion))) ); deterministicTaskQueue.runAllRunnableTasks(); assertFalse(fut.isDone()); @@ -698,6 +714,7 @@ public void testConcurrentJoining() { node -> new JoinRequest( node, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node, localNode, newTerm, initialTerm, initialVersion)) ) @@ -715,6 +732,7 @@ public void testConcurrentJoining() { return new JoinRequest( node, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node, localNode, newTerm, initialTerm, initialVersion)) ); @@ -723,6 +741,7 @@ public void testConcurrentJoining() { return new JoinRequest( node, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node, localNode, randomLongBetween(0, initialTerm), initialTerm, initialVersion)) ); @@ -731,6 +750,7 @@ public void testConcurrentJoining() { return new 
JoinRequest( node, CompatibilityVersionsUtils.staticCurrent(), + Set.of(), newTerm, Optional.of(new Join(node, localNode, newTerm, initialTerm, initialVersion + randomLongBetween(1, 10))) ); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeLeftExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeLeftExecutorTests.java index 4f03cbe3a1fc0..2c4bd7444f060 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/NodeLeftExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/NodeLeftExecutorTests.java @@ -17,7 +17,6 @@ import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; -import org.elasticsearch.cluster.version.CompatibilityVersions; import org.elasticsearch.common.Priority; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -27,7 +26,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -69,15 +67,8 @@ public void testRerouteAfterRemovingNodes() throws Exception { final AtomicReference remainingNodesClusterState = new AtomicReference<>(); final NodeLeftExecutor executor = new NodeLeftExecutor(allocationService) { @Override - protected ClusterState remainingNodesClusterState( - ClusterState currentState, - DiscoveryNodes.Builder remainingNodesBuilder, - Map compatibilityVersions - ) { - remainingNodesClusterState.set( - super.remainingNodesClusterState(currentState, remainingNodesBuilder, compatibilityVersions) - ); - return remainingNodesClusterState.get(); + void remainingNodesClusterState(ClusterState state) { + remainingNodesClusterState.set(state); } }; diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java index d266988f0123d..0cdc5de86a8d3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/PublicationTransportHandlerTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Nullable; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -155,7 +155,7 @@ private static boolean isDiff(BytesTransportRequest request, TransportVersion ve IOUtils.close(in); } } catch (IOException e) { - throw new AssertionError("unexpected", e); + return fail(e); } } @@ -229,7 +229,7 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { var node = DiscoveryNodeUtils.builder("node-" + allNodes.size()) .version( VersionUtils.randomCompatibleVersion(random(), Version.CURRENT), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ) .build(); @@ -364,7 +364,7 @@ public void testIncludesLastCommittedFieldsInDiffSerialization() { final var otherNode = DiscoveryNodeUtils.builder("otherNode") .version( VersionUtils.randomCompatibleVersion(random(), Version.CURRENT), - 
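The NodeLeftExecutorTests hunk just above tracks a simplification in NodeLeftExecutor: the overridable remainingNodesClusterState is no longer a protected method that builds and returns the shrunken state (taking the nodes builder and a compatibility-versions map), but a package-private void hook that merely observes the state the executor has already computed. A sketch of the new override, using only names the test shows:

```java
// The hook now receives the fully built remaining-nodes state; subclasses
// (here, the test) can capture it but no longer alter it.
final AtomicReference<ClusterState> observed = new AtomicReference<>();
final NodeLeftExecutor executor = new NodeLeftExecutor(allocationService) {
    @Override
    void remainingNodesClusterState(ClusterState state) {
        observed.set(state); // capture for later assertions
    }
};
```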
IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ) .build(); @@ -387,13 +387,13 @@ public void onResponse(TransportResponse transportResponse) { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } }), new Task(randomNonNegativeLong(), "test", "test", "", TaskId.EMPTY_TASK_ID, Map.of())); } catch (IncompatibleClusterStateVersionException e) { context.handler().handleException(new RemoteTransportException("wrapped", e)); } catch (Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } }; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java index e4a98c5da335b..0680392ffb3f0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; @@ -475,7 +476,7 @@ public void testIndexAndAliasWithSameName() { final IllegalArgumentException iae = expectThrows( IllegalArgumentException.class, () -> IndexMetadata.builder("index") - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.V_8_5_0)) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_8_5_0)) .numberOfShards(1) .numberOfReplicas(0) .putAlias(AliasMetadata.builder("index").build()) @@ -487,7 +488,7 @@ public void testIndexAndAliasWithSameName() { public void testRepairIndexAndAliasWithSameName() { final IndexMetadata indexMetadata = IndexMetadata.builder("index") - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.V_8_5_0)) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_8_5_0)) .numberOfShards(1) .numberOfReplicas(0) .putAlias(AliasMetadata.builder("index").build()) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java index 9ae3afe1d9b2a..eb9e26d08ed9c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataVerifierTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; @@ -94,12 +95,12 @@ public void testCustomSimilarity() { .put("index.similarity.my_similarity.after_effect", "l") .build() ); - service.verifyIndexMetadata(src, IndexVersion.MINIMUM_COMPATIBLE); + service.verifyIndexMetadata(src, IndexVersions.MINIMUM_COMPATIBLE); } public void testIncompatibleVersion() { IndexMetadataVerifier service = getIndexMetadataVerifier(); - IndexVersion minCompat = IndexVersion.MINIMUM_COMPATIBLE; + IndexVersion minCompat = 
IndexVersions.MINIMUM_COMPATIBLE; IndexVersion indexCreated = IndexVersion.fromId(randomIntBetween(1000099, minCompat.id() - 1)); final IndexMetadata metadata = newIndexMeta( "foo", @@ -107,7 +108,7 @@ public void testIncompatibleVersion() { ); String message = expectThrows( IllegalStateException.class, - () -> service.verifyIndexMetadata(metadata, IndexVersion.MINIMUM_COMPATIBLE) + () -> service.verifyIndexMetadata(metadata, IndexVersions.MINIMUM_COMPATIBLE) ).getMessage(); assertThat( message, @@ -129,7 +130,7 @@ public void testIncompatibleVersion() { indexCreated = IndexVersionUtils.randomVersionBetween(random(), minCompat, IndexVersion.current()); IndexMetadata goodMeta = newIndexMeta("foo", Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, indexCreated).build()); - service.verifyIndexMetadata(goodMeta, IndexVersion.MINIMUM_COMPATIBLE); + service.verifyIndexMetadata(goodMeta, IndexVersions.MINIMUM_COMPATIBLE); } private IndexMetadataVerifier getIndexMetadataVerifier() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 8df51a08b5ecf..43d64522ee6fb 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContextHelper; @@ -1265,7 +1266,7 @@ public void testRejectTranslogRetentionSettings() { if (randomBoolean()) { settings.put( SETTING_VERSION_CREATED, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()) ); } request.settings(settings.build()); @@ -1300,7 +1301,7 @@ public void testDeprecateTranslogRetentionSettings() { } else { settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), between(1, 128) + "mb"); } - settings.put(SETTING_VERSION_CREATED, IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0)); + settings.put(SETTING_VERSION_CREATED, IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)); request.settings(settings.build()); aggregateIndexSettings( ClusterState.EMPTY_STATE, diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index ad34289d37fed..fe0dd5ea1a5e7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.alias.RandomAliasActionsGenerator; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.ingest.IngestMetadata; @@ -774,19 +775,19 @@ public void testFindMappingsWithFilters() throws 
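Much of the remaining churn in this diff is one mechanical rename: the named constants (V_7_0_0, V_8_5_0, MINIMUM_COMPATIBLE, ZERO, and friends) move off IndexVersion onto a new IndexVersions holder class, while the instance API and the current()/fromId() factories stay on IndexVersion. The pattern, sketched:

```java
// Old (pre-diff):  IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.V_8_5_0
// New (this diff): the named constants live on IndexVersions instead.
IndexVersion minCompat = IndexVersions.MINIMUM_COMPATIBLE;
IndexVersion created = IndexVersions.V_8_5_0;
IndexVersion current = IndexVersion.current();                // unchanged
IndexVersion older = IndexVersion.fromId(minCompat.id() - 1); // unchanged
```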
IOException { public void testOldestIndexComputation() { Metadata metadata = buildIndicesWithVersions( - IndexVersion.V_7_0_0, + IndexVersions.V_7_0_0, IndexVersion.current(), IndexVersion.fromId(IndexVersion.current().id() + 1) ).build(); - assertEquals(IndexVersion.V_7_0_0, metadata.oldestIndexVersion()); + assertEquals(IndexVersions.V_7_0_0, metadata.oldestIndexVersion()); Metadata.Builder b = Metadata.builder(); assertEquals(IndexVersion.current(), b.build().oldestIndexVersion()); Throwable ex = expectThrows( IllegalArgumentException.class, - () -> buildIndicesWithVersions(IndexVersion.V_7_0_0, IndexVersion.ZERO, IndexVersion.fromId(IndexVersion.current().id() + 1)) + () -> buildIndicesWithVersions(IndexVersions.V_7_0_0, IndexVersions.ZERO, IndexVersion.fromId(IndexVersion.current().id() + 1)) .build() ); @@ -1873,8 +1874,8 @@ public void testHiddenAliasValidation() { public void testSystemAliasValidationMixedVersionSystemAndRegularFails() { final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); @@ -1923,8 +1924,8 @@ public void testSystemAliasValidationNewSystemAndRegularFails() { public void testSystemAliasOldSystemAndNewRegular() { final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); final IndexMetadata oldVersionSystem = buildIndexWithAlias(".oldVersionSystem", SYSTEM_ALIAS_NAME, null, random7xVersion, true); final IndexMetadata regularIndex = buildIndexWithAlias("regular1", SYSTEM_ALIAS_NAME, false, IndexVersion.current(), false); @@ -1936,8 +1937,8 @@ public void testSystemAliasOldSystemAndNewRegular() { public void testSystemIndexValidationAllRegular() { final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); @@ -1950,8 +1951,8 @@ public void testSystemIndexValidationAllRegular() { public void testSystemAliasValidationAllSystemSomeOld() { final IndexVersion random7xVersion = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); final IndexMetadata currentVersionSystem = buildIndexWithAlias(".system1", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); final IndexMetadata currentVersionSystem2 = buildIndexWithAlias(".system2", SYSTEM_ALIAS_NAME, null, IndexVersion.current(), true); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java 
b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java index 0e1f7caa3f432..780b0dd138563 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java @@ -36,7 +36,6 @@ import org.junit.Before; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; @@ -49,6 +48,9 @@ import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.elasticsearch.test.ClusterServiceUtils.setState; +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -90,7 +92,7 @@ public void testCalculateChangesAddChangeAndDelete() { IndexTemplateMetadata.builder("changed_test_template").patterns(randomIndexPatterns()).build() ); - final TemplateUpgradeService service = new TemplateUpgradeService(null, clusterService, threadPool, Arrays.asList(templates -> { + final TemplateUpgradeService service = new TemplateUpgradeService(null, clusterService, threadPool, List.of(templates -> { if (shouldAdd) { assertNull( templates.put( @@ -126,23 +128,22 @@ public void testCalculateChangesAddChangeAndDelete() { if (shouldAdd) { assertThat(changes.v1().get("added_test_template"), notNullValue()); if (shouldChange) { - assertThat(changes.v1().keySet(), hasSize(2)); + assertThat(changes.v1(), aMapWithSize(2)); assertThat(changes.v1().get("changed_test_template"), notNullValue()); } else { - assertThat(changes.v1().keySet(), hasSize(1)); + assertThat(changes.v1(), aMapWithSize(1)); } } else { if (shouldChange) { assertThat(changes.v1().get("changed_test_template"), notNullValue()); - assertThat(changes.v1().keySet(), hasSize(1)); + assertThat(changes.v1(), aMapWithSize(1)); } else { - assertThat(changes.v1().keySet(), empty()); + assertThat(changes.v1(), anEmptyMap()); } } if (shouldRemove) { - assertThat(changes.v2(), hasSize(1)); - assertThat(changes.v2().contains("removed_test_template"), equalTo(true)); + assertThat(changes.v2(), contains("removed_test_template")); } else { assertThat(changes.v2(), empty()); } @@ -280,7 +281,7 @@ public void testClusterStateUpdate() throws InterruptedException { return null; }).when(mockIndicesAdminClient).deleteTemplate(any(DeleteIndexTemplateRequest.class), any(ActionListener.class)); - new TemplateUpgradeService(mockClient, clusterService, threadPool, Arrays.asList(templates -> { + TemplateUpgradeService service = new TemplateUpgradeService(mockClient, clusterService, threadPool, List.of(templates -> { assertNull( templates.put( "added_test_template", @@ -329,6 +330,8 @@ public void clusterChanged(ClusterChangedEvent event) { } }; + clusterService.addListener(service); + ClusterState prevState = ClusterState.EMPTY_STATE; ClusterState state = ClusterState.builder(prevState) .nodes( diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java index cc700451931b5..e9a28f9d44cb2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -17,6 +17,7 @@ import 
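Note the behavioural change buried in the TemplateUpgradeServiceTests hunks above: the test now keeps a reference to the service and calls clusterService.addListener(service) itself, which implies the TemplateUpgradeService constructor no longer self-registers as a cluster-state listener. A sketch of the new wiring under that assumption (client, clusterService, threadPool and the upgrader lambda stand in for whatever the caller already has):

```java
// Construct the service inert, then attach it explicitly; it only starts
// reacting to cluster-state updates once registered as a listener.
TemplateUpgradeService service = new TemplateUpgradeService(client, clusterService, threadPool, List.of(templates -> {
    // template upgrader: mutate the map of index templates as needed
    return templates;
}));
clusterService.addListener(service);
```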
org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; @@ -225,7 +226,7 @@ public void testDiscoveryNodeToXContent() { transportAddress, withExternalId ? "test-external-id" : "test-name", Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ) ) diff --git a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index 8ef158aa277d8..b01ad0f5b0493 100644 --- a/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -403,7 +404,7 @@ public void testMinMaxNodeVersions() { assertEquals(Version.CURRENT, DiscoveryNodes.EMPTY_NODES.getMaxNodeVersion()); assertEquals(Version.CURRENT.minimumCompatibilityVersion(), DiscoveryNodes.EMPTY_NODES.getMinNodeVersion()); assertEquals(IndexVersion.current(), DiscoveryNodes.EMPTY_NODES.getMaxDataNodeCompatibleIndexVersion()); - assertEquals(IndexVersion.MINIMUM_COMPATIBLE, DiscoveryNodes.EMPTY_NODES.getMinSupportedIndexVersion()); + assertEquals(IndexVersions.MINIMUM_COMPATIBLE, DiscoveryNodes.EMPTY_NODES.getMinSupportedIndexVersion()); // use a mix of versions with major, minor, and patch numbers List dataVersions = List.of( @@ -463,7 +464,7 @@ public void accept(Consumer update, long expectedGenerat TransportVersion.current() ); } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } } assertEquals(expectedGeneration, discoveryNodes.getNodeLeftGeneration()); @@ -481,7 +482,7 @@ public void accept(Consumer update, long expectedGenerat final var node0 = nodeVersionFactory.apply( 0, - new VersionInformation(VersionUtils.randomVersion(random()), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + new VersionInformation(VersionUtils.randomVersion(random()), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) ); testHarness.accept(builder -> builder.add(node0), 0L); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java index 9e3be76cbce59..c67671d5b240c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/AllocationCommandsTests.java @@ -51,6 +51,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; @@ -713,9 +714,9 @@ public void testCanceledShardIsInitializedRespectingAllocationDeciders() { ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) .nodes( DiscoveryNodes.builder() - 
.add(newNode("node-0", Version.V_8_10_0, IndexVersion.V_8_10_0)) - .add(newNode("node-1", Version.V_8_9_0, IndexVersion.V_8_9_0)) - .add(newNode("node-2", Version.V_8_9_0, IndexVersion.V_8_9_0)) + .add(newNode("node-0", Version.V_8_10_0, IndexVersions.V_8_10_0)) + .add(newNode("node-1", Version.V_8_9_0, IndexVersions.V_8_9_0)) + .add(newNode("node-2", Version.V_8_9_0, IndexVersions.V_8_9_0)) ) .metadata(Metadata.builder().put(indexMetadata, false)) .routingTable(RoutingTable.builder().add(IndexRoutingTable.builder(shardId.getIndex()).addShard(primary).addShard(replica))) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 1d796c931270b..43b378c88ccd7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.cluster.ClusterStateChanges; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; @@ -227,7 +228,7 @@ protected DiscoveryNode createNode(DiscoveryNodeRole... mustHaveRoles) { .roles(roles) .version( VersionUtils.randomCompatibleVersion(random(), Version.CURRENT), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ) .build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 484a081273314..74926a7556348 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.index.IndexVersionUtils; @@ -725,14 +726,14 @@ public void testReplicaOnNewestVersionIsPromoted() { newNode( "node3-old", VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), null), - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.MINIMUM_COMPATIBLE, null) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, null) ) ) .add( newNode( "node4-old", VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), null), - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.MINIMUM_COMPATIBLE, null) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, null) ) ) ) diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java index fbf4c8ce3941a..c348cdfe43ecf 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/NodeVersionAllocationDeciderTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.snapshots.EmptySnapshotsInfoService; @@ -324,11 +325,11 @@ public void testRebalanceDoesNotAllocatePrimaryAndReplicasOnDifferentVersionNode final DiscoveryNode newNode = DiscoveryNodeUtils.builder("newNode").roles(MASTER_DATA_ROLES).build(); final DiscoveryNode oldNode1 = DiscoveryNodeUtils.builder("oldNode1") .roles(MASTER_DATA_ROLES) - .version(VersionUtils.getPreviousVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) + .version(VersionUtils.getPreviousVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) .build(); final DiscoveryNode oldNode2 = DiscoveryNodeUtils.builder("oldNode2") .roles(MASTER_DATA_ROLES) - .version(VersionUtils.getPreviousVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) + .version(VersionUtils.getPreviousVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) .build(); AllocationId allocationId1P = AllocationId.newInitializing(); AllocationId allocationId1R = AllocationId.newInitializing(); @@ -429,11 +430,11 @@ public void testRestoreDoesNotAllocateSnapshotOnOlderNodes() { final DiscoveryNode newNode = DiscoveryNodeUtils.builder("newNode").roles(MASTER_DATA_ROLES).build(); final DiscoveryNode oldNode1 = DiscoveryNodeUtils.builder("oldNode1") .roles(MASTER_DATA_ROLES) - .version(VersionUtils.getPreviousVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) + .version(VersionUtils.getPreviousVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) .build(); final DiscoveryNode oldNode2 = DiscoveryNodeUtils.builder("oldNode2") .roles(MASTER_DATA_ROLES) - .version(VersionUtils.getPreviousVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) + .version(VersionUtils.getPreviousVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.getPreviousVersion()) .build(); final Snapshot snapshot = new Snapshot("rep1", new SnapshotId("snp1", UUIDs.randomBase64UUID())); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java index 708a3125590fd..45e8fe4e525cd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ShardsAvailabilityHealthIndicatorServiceTests.java @@ -44,12 +44,14 @@ import org.elasticsearch.health.SimpleHealthIndicatorDetails; import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.ExecutorNames; import org.elasticsearch.indices.SystemDataStreamDescriptor; import 
org.elasticsearch.indices.SystemIndexDescriptorUtils; import org.elasticsearch.indices.SystemIndices; +import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.mockito.stubbing.Answer; @@ -1532,6 +1534,157 @@ public void testLimitNumberOfAffectedResources() { } } + public void testShouldBeGreenWhenFrozenIndexIsUnassignedAndOriginalIsAvailable() { + String originalIndex = "logs-2023.07.11-000024"; + String restoredIndex = "restored-logs-2023.07.11-000024"; + var clusterState = createClusterStateWith( + List.of( + IndexMetadata.builder(restoredIndex) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, originalIndex) + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, randomBoolean()) + .build() + ) + .numberOfShards(1) + .numberOfReplicas(0) + .build(), + IndexMetadata.builder(originalIndex) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build()) + .numberOfShards(1) + .numberOfReplicas(0) + .build() + ), + List.of( + index(restoredIndex, new ShardAllocation(randomNodeId(), UNAVAILABLE)), + index(originalIndex, new ShardAllocation(randomNodeId(), AVAILABLE)) + ), + List.of(), + List.of() + ); + var service = createShardsAvailabilityIndicatorService(clusterState); + + HealthIndicatorResult result = service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + assertThat( + result, + equalTo( + createExpectedResult( + GREEN, + "This cluster has 1 unavailable primary shard. 
This is a mounted shard and the original " + "shard is available, so there are no data availability problems.", + Map.of("unassigned_primaries", 1, "started_primaries", 1), + List.of(), + List.of( + new Diagnosis(ACTION_CHECK_ALLOCATION_EXPLAIN_API, List.of(new Diagnosis.Resource(INDEX, List.of(restoredIndex)))) + ) + ) + ) + ); + } + + public void testShouldBeRedWhenFrozenIndexIsUnassignedAndOriginalIsUnavailable() { + String originalIndex = "logs-2023.07.11-000024"; + String restoredIndex = "restored-logs-2023.07.11-000024"; + List<IndexMetadata> indexMetadata = new ArrayList<>(2); + List<IndexRoutingTable> routes = new ArrayList<>(2); + indexMetadata.add( + IndexMetadata.builder(restoredIndex) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, originalIndex) + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, randomBoolean()) + .build() + ) + .numberOfShards(1) + .numberOfReplicas(0) + .build() + ); + routes.add(index(restoredIndex, new ShardAllocation(randomNodeId(), UNAVAILABLE))); + // When original does not exist + { + var clusterState = createClusterStateWith(indexMetadata, routes, List.of(), List.of()); + var service = createShardsAvailabilityIndicatorService(clusterState); + + HealthIndicatorResult result = service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + assertThat( + result, + equalTo( + createExpectedResult( + RED, + "This cluster has 1 unavailable primary shard.", + Map.of("unassigned_primaries", 1), + List.of( + new HealthIndicatorImpact( + NAME, + ShardsAvailabilityHealthIndicatorService.READ_ONLY_PRIMARY_UNASSIGNED_IMPACT_ID, + 1, + "Searching 1 index [" + restoredIndex + "] might return incomplete results.", + List.of(ImpactArea.SEARCH) + ) + ), + List.of( + new Diagnosis( + ACTION_CHECK_ALLOCATION_EXPLAIN_API, + List.of(new Diagnosis.Resource(INDEX, List.of(restoredIndex))) + ) + ) + ) + ) + ); + } + // When original index has unavailable shards + { + indexMetadata.add( + IndexMetadata.builder(originalIndex) + .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build()) + .numberOfShards(1) + .numberOfReplicas(0) + .build() + ); + routes.add(index(originalIndex, new ShardAllocation(randomNodeId(), UNAVAILABLE))); + var clusterState = createClusterStateWith(indexMetadata, routes, List.of(), List.of()); + var service = createShardsAvailabilityIndicatorService(clusterState); + + HealthIndicatorResult result = service.calculate(true, HealthInfo.EMPTY_HEALTH_INFO); + assertThat( + result, + equalTo( + createExpectedResult( + RED, + "This cluster has 2 unavailable primary shards.", + Map.of("unassigned_primaries", 2), + List.of( + new HealthIndicatorImpact( + NAME, + ShardsAvailabilityHealthIndicatorService.PRIMARY_UNASSIGNED_IMPACT_ID, + 1, + "Cannot add data to 1 index [logs-2023.07.11-000024]."
+ " Searches might return incomplete results.", + List.of(ImpactArea.INGEST, ImpactArea.SEARCH) + ), + new HealthIndicatorImpact( + NAME, + ShardsAvailabilityHealthIndicatorService.READ_ONLY_PRIMARY_UNASSIGNED_IMPACT_ID, + 1, + "Searching 1 index [restored-logs-2023.07.11-000024] might return incomplete results.", + List.of(ImpactArea.SEARCH) + ) + ), + List.of( + new Diagnosis( + ACTION_CHECK_ALLOCATION_EXPLAIN_API, + List.of(new Diagnosis.Resource(INDEX, List.of(originalIndex, restoredIndex))) + ) + ) + ) + ) + ); + } + } + /** * Creates the {@link SystemIndices} with one standalone system index and a system data stream */ @@ -1791,6 +1944,24 @@ private static IndexRoutingTable index(String name, ShardAllocation primaryState ); } + private static IndexRoutingTable frozenIndex(String name, ShardAllocation primaryState, String originalIndex) { + return index( + IndexMetadata.builder(name) + .settings( + Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, originalIndex) + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE) + .put(SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_PARTIAL_SETTING_KEY, randomBoolean()) + .build() + ) + .numberOfShards(1) + .numberOfReplicas(0) + .build(), + primaryState + ); + } + private static IndexRoutingTable index(IndexMetadata indexMetadata, ShardAllocation primaryState, ShardAllocation... replicaStates) { var index = indexMetadata.getIndex(); var shardId = new ShardId(index, 0); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 966520070ad70..51701cf9233e4 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -2302,7 +2302,7 @@ public void clusterStateProcessed(ClusterState initialState, ClusterState newSta @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } @@ -2397,7 +2397,7 @@ public void clusterStateProcessed(ClusterState initialState, ClusterState newSta @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } @@ -2474,7 +2474,7 @@ public ClusterState execute(ClusterState currentState) { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } }); } diff --git a/server/src/test/java/org/elasticsearch/common/hashing/Murmur3HasherTests.java b/server/src/test/java/org/elasticsearch/common/hashing/Murmur3HasherTests.java index 8574f8debb8c0..fdebec676192c 100644 --- a/server/src/test/java/org/elasticsearch/common/hashing/Murmur3HasherTests.java +++ b/server/src/test/java/org/elasticsearch/common/hashing/Murmur3HasherTests.java @@ -8,14 +8,13 @@ package org.elasticsearch.common.hashing; +import org.elasticsearch.common.Numbers; import org.elasticsearch.common.hash.Murmur3Hasher; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.test.ESTestCase; import java.nio.charset.StandardCharsets; -import static org.hamcrest.Matchers.equalTo; - public class Murmur3HasherTests extends ESTestCase { public void testKnownValues() { @@ -37,13 +36,21 @@ private static void assertHash(long lower, long upper, String inputString, long byte[] bytes = 
inputString.getBytes(StandardCharsets.UTF_8); Murmur3Hasher mh = new Murmur3Hasher(seed); mh.update(bytes); - MurmurHash3.Hash128 actual = Murmur3Hasher.toHash128(mh.digest()); + MurmurHash3.Hash128 actual = mh.digestHash(); assertHash(expected, actual); } private static void assertHash(MurmurHash3.Hash128 expected, MurmurHash3.Hash128 actual) { assertEquals(expected.h1, actual.h1); assertEquals(expected.h2, actual.h2); + assertEquals(expected, toHash128(expected.getBytes())); + } + + public static MurmurHash3.Hash128 toHash128(byte[] doubleLongBytes) { + MurmurHash3.Hash128 hash128 = new MurmurHash3.Hash128(); + hash128.h1 = Numbers.bytesToLong(doubleLongBytes, 0); + hash128.h2 = Numbers.bytesToLong(doubleLongBytes, 8); + return hash128; } public void testSingleVsSequentialMurmur3() { @@ -85,7 +92,7 @@ public void testSingleVsSequentialMurmur3() { mh.update(splitBytes[k]); } } - MurmurHash3.Hash128 sequentialHash = Murmur3Hasher.toHash128(mh.digest()); - assertThat(singleHash, equalTo(sequentialHash)); + MurmurHash3.Hash128 sequentialHash = mh.digestHash(); + assertHash(singleHash, sequentialHash); } } diff --git a/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java b/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java index a0534ea52dfc4..0b8085fc6d9a7 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/ChunkedLoggingStreamTests.java @@ -137,7 +137,7 @@ public void append(LogEvent event) { Loggers.addAppender(captureLogger, appender); runnable.run(); } catch (Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } finally { Loggers.removeAppender(captureLogger, appender); appender.stop(); @@ -177,7 +177,7 @@ public static BytesReference getDecodedLoggedBody( Streams.copy(gzipInputStream, bytesStreamOutput); return bytesStreamOutput.bytes(); } catch (Exception e) { - throw new AssertionError("unexpected", e); + return fail(e); } } diff --git a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java index bbb14fe6e9133..598cc8213e91f 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java @@ -310,6 +310,7 @@ public void testAddComplexWarning() { + ".ml-stats,.monitoring-beats-mb,.monitoring-ent-search-mb,.monitoring-es-mb,.monitoring-kibana-mb," + ".monitoring-logstash-mb,.profiling-ilm-lock,.slm-history,.watch-history-16,behavioral_analytics-events-default," + "ilm-history,logs,metrics,profiling-events,profiling-executables,profiling-metrics,profiling-returnpads-private," + + "profiling-costs," + "profiling-sq-executables,profiling-sq-leafframes,profiling-stackframes,profiling-stacktraces," + "profiling-symbols,synthetics] with patterns (.deprecation-indexing-template => [.logs-deprecation.*]," + ".fleet-file-data => [.fleet-file-data-*-*],.fleet-files => [.fleet-files-*-*],.ml-anomalies- => [.ml-anomalies-*]," @@ -322,6 +323,7 @@ public void testAddComplexWarning() { + "logs => [logs-*-*],metrics => [metrics-*-*],profiling-events => [profiling-events*],profiling-executables => " + "[profiling-executables*],profiling-metrics => [profiling-metrics*],profiling-returnpads-private => " + "[.profiling-returnpads-private*],profiling-sq-executables => [.profiling-sq-executables*]," + +
"profiling-costs => [.profiling-costs*]," + "profiling-sq-leafframes => [.profiling-sq-leafframes*],profiling-stackframes => [profiling-stackframes*]," + "profiling-stacktraces => [profiling-stacktraces*],profiling-symbols => [.profiling-symbols*],synthetics => " + "[synthetics-*-*]); this template [global] may be ignored in favor of a composable template at index creation time" diff --git a/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java index e36d5eed61c1f..30412059394cd 100644 --- a/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/CancellableSingleObjectCacheTests.java @@ -355,7 +355,7 @@ protected boolean isFresh(String currentKey, String newKey) { if (e instanceof TaskCancelledException) { assertTrue(cancel); } else { - throw new AssertionError("unexpected", e); + fail(e); } })); }); diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedThrottledTaskRunnerTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedThrottledTaskRunnerTests.java index 79266f12c0208..49c091c65c71a 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedThrottledTaskRunnerTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedThrottledTaskRunnerTests.java @@ -68,7 +68,7 @@ public void doRun() { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThrottledIteratorTests.java b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThrottledIteratorTests.java index 7bc147b77dfd6..c4b9b36c449ac 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/ThrottledIteratorTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/concurrent/ThrottledIteratorTests.java @@ -84,7 +84,7 @@ protected void doRun() { try { assertTrue(itemStartLatch.await(30, TimeUnit.SECONDS)); } catch (InterruptedException e) { - throw new AssertionError("unexpected", e); + fail(e); } finally { blockPermits.release(); } @@ -99,7 +99,7 @@ public void onAfter() { @Override public void onFailure(Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } }); } else { diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 3b1b316e44bae..da1f2aa89642b 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -43,6 +43,7 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.BiConsumer; import java.util.function.Supplier; @@ -115,7 +116,8 @@ private DiscoveryModule newModule( mock(RerouteService.class), null, new NoneCircuitBreakerService(), - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); } diff --git a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java index c61187066487d..ce5841d066d88 100644 --- 
a/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/PeerFinderTests.java @@ -556,7 +556,7 @@ public void handleResponse(PeersResponse response) { @Override public void handleException(TransportException exp) { - throw new AssertionError("unexpected", exp); + fail(exp); } } ); diff --git a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java index cfcaeb9d9d704..112f96562b7cd 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -584,7 +585,7 @@ public void testIndexCompatibilityChecks() throws IOException { } Version oldVersion = Version.fromId(between(1, Version.CURRENT.minimumCompatibilityVersion().id - 1)); - IndexVersion oldIndexVersion = IndexVersion.fromId(between(1, IndexVersion.MINIMUM_COMPATIBLE.id() - 1)); + IndexVersion oldIndexVersion = IndexVersion.fromId(between(1, IndexVersions.MINIMUM_COMPATIBLE.id() - 1)); Version previousNodeVersion = Version.fromId(between(Version.CURRENT.minimumCompatibilityVersion().id, Version.CURRENT.id - 1)); overrideOldestIndexVersion(oldIndexVersion, previousNodeVersion, env.nodeDataPaths()); @@ -608,7 +609,7 @@ public void testIndexCompatibilityChecks() throws IOException { ); // This should work - overrideOldestIndexVersion(IndexVersion.MINIMUM_COMPATIBLE, previousNodeVersion, env.nodeDataPaths()); + overrideOldestIndexVersion(IndexVersions.MINIMUM_COMPATIBLE, previousNodeVersion, env.nodeDataPaths()); checkForIndexCompatibility(logger, env.dataPaths()); // Trying to boot with newer version should pass this check diff --git a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java index 6a309767878c4..fc89e4d2176c1 100644 --- a/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/env/NodeMetadataTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.gateway.MetadataStateFormat; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; import org.elasticsearch.test.TransportVersionUtils; @@ -76,7 +77,7 @@ public void testEqualsHashcodeSerialization() { public void testReadsFormatWithoutVersion() throws IOException { // the behaviour tested here is only appropriate if the current version is compatible with versions 7 and earlier - assertTrue(IndexVersion.MINIMUM_COMPATIBLE.onOrBefore(IndexVersion.V_7_0_0)); + assertTrue(IndexVersions.MINIMUM_COMPATIBLE.onOrBefore(IndexVersions.V_7_0_0)); // when the current version is incompatible with version 7, the behaviour should change to reject files like the given resource // which do not have the version field diff --git a/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java new file mode 100644 index 
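Another mechanical change running through these test files replaces throw new AssertionError("unexpected", e) with fail(e), sometimes as return fail(e) in value-returning positions. That compiles only if the test framework's fail overload is generic and declares a return type while unconditionally throwing; a sketch of the shape being relied on (the real helper lives in the test framework, so this is illustrative):

```java
// Always throws, but the generic return type lets callers write
// `return fail(e);` where a value is syntactically required.
public static <T> T fail(Throwable cause) {
    throw new AssertionError("unexpected", cause);
}
```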
0000000000000..0a799934ae64e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/features/FeatureServiceTests.java @@ -0,0 +1,166 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.features; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; + +public class FeatureServiceTests extends ESTestCase { + + private static class TestFeatureSpecification implements FeatureSpecification { + private final Set features; + private final Map historicalFeatures; + + private TestFeatureSpecification(Set features, Map historicalFeatures) { + this.features = features; + this.historicalFeatures = historicalFeatures; + } + + @Override + public Set getFeatures() { + return features; + } + + @Override + public Map getHistoricalFeatures() { + return historicalFeatures; + } + } + + public void testFailsDuplicateFeatures() { + // these all need to be separate classes to trigger the exception + FeatureSpecification fs1 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { + }; + FeatureSpecification fs2 = new TestFeatureSpecification(Set.of(new NodeFeature("f1")), Map.of()) { + }; + FeatureSpecification hfs1 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) { + }; + FeatureSpecification hfs2 = new TestFeatureSpecification(Set.of(), Map.of(new NodeFeature("f1"), Version.V_8_11_0)) { + }; + + assertThat( + expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, fs2))).getMessage(), + containsString("Duplicate feature") + ); + assertThat( + expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(hfs1, hfs2))).getMessage(), + containsString("Duplicate feature") + ); + assertThat( + expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs1, hfs1))).getMessage(), + containsString("Duplicate feature") + ); + } + + public void testFailsNonHistoricalVersion() { + FeatureSpecification fs = new TestFeatureSpecification( + Set.of(), + Map.of(new NodeFeature("f1"), FeatureService.CLUSTER_FEATURES_ADDED_VERSION) + ); + + assertThat( + expectThrows(IllegalArgumentException.class, () -> new FeatureService(List.of(fs))).getMessage(), + containsString("not a historical version") + ); + } + + public void testGetNodeFeaturesCombinesAllSpecs() { + List specs = List.of( + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), + new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), + new TestFeatureSpecification(Set.of(), Map.of()) + ); + + FeatureService service = 
new FeatureService(specs); + assertThat(service.getNodeFeatures(), containsInAnyOrder("f1", "f2", "f3", "f4", "f5")); + } + + public void testStateHasFeatures() { + List<FeatureSpecification> specs = List.of( + new TestFeatureSpecification(Set.of(new NodeFeature("f1"), new NodeFeature("f2")), Map.of()), + new TestFeatureSpecification(Set.of(new NodeFeature("f3")), Map.of()), + new TestFeatureSpecification(Set.of(new NodeFeature("f4"), new NodeFeature("f5")), Map.of()), + new TestFeatureSpecification(Set.of(), Map.of()) + ); + + ClusterState state = ClusterState.builder(ClusterName.DEFAULT) + .nodeFeatures( + Map.of("node1", Set.of("f1", "f2", "nf1"), "node2", Set.of("f1", "f2", "nf2"), "node3", Set.of("f1", "f2", "nf1")) + ) + .build(); + + FeatureService service = new FeatureService(specs); + assertTrue(service.clusterHasFeature(state, new NodeFeature("f1"))); + assertTrue(service.clusterHasFeature(state, new NodeFeature("f2"))); + assertFalse(service.clusterHasFeature(state, new NodeFeature("nf1"))); + assertFalse(service.clusterHasFeature(state, new NodeFeature("nf2"))); + assertFalse(service.clusterHasFeature(state, new NodeFeature("nf3"))); + } + + private static ClusterState stateWithMinVersion(Version version) { + DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(); + nodes.add(DiscoveryNodeUtils.builder("node").version(version, IndexVersions.ZERO, IndexVersion.current()).build()); + for (int n = randomInt(5); n >= 0; n--) { + nodes.add( + DiscoveryNodeUtils.builder("node" + n) + .version( + VersionUtils.randomVersionBetween(random(), version, Version.CURRENT), + IndexVersions.ZERO, + IndexVersion.current() + ) + .build() + ); + } + + return ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build(); + } + + public void testStateHasHistoricalFeatures() { + NodeFeature v8_11_0 = new NodeFeature("hf_8.11.0"); + NodeFeature v8_10_0 = new NodeFeature("hf_8.10.0"); + NodeFeature v7_17_0 = new NodeFeature("hf_7.17.0"); + List<FeatureSpecification> specs = List.of( + new TestFeatureSpecification(Set.of(), Map.of(v8_11_0, Version.V_8_11_0)), + new TestFeatureSpecification(Set.of(), Map.of(v8_10_0, Version.V_8_10_0)), + new TestFeatureSpecification(Set.of(), Map.of(v7_17_0, Version.V_7_17_0)) + ); + + FeatureService service = new FeatureService(specs); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_11_0)); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v8_10_0)); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_11_0), v7_17_0)); + + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_11_0)); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v8_10_0)); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_8_10_0), v7_17_0)); + + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_11_0)); + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v8_10_0)); + assertTrue(service.clusterHasFeature(stateWithMinVersion(Version.V_7_17_0), v7_17_0)); + + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_11_0)); + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v8_10_0)); + assertFalse(service.clusterHasFeature(stateWithMinVersion(Version.V_7_16_0), v7_17_0)); + } +}
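// Editor's sketch, not part of the change: how the FeatureService exercised by
// FeatureServiceTests appears to be used. MyFeatures and "my_plugin.my_feature" are
// hypothetical names; clusterState is assumed to be in scope.
class MyFeatures implements FeatureSpecification {
    static final NodeFeature MY_FEATURE = new NodeFeature("my_plugin.my_feature");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(MY_FEATURE); // advertised by every node that ships this spec
    }
}

// The constructor rejects duplicate feature ids across specs (see testFailsDuplicateFeatures);
// clusterHasFeature answers true only when every node in the state advertises the feature,
// or, for historical features, when every node is on or after the mapped release version.
FeatureService featureService = new FeatureService(List.of(new MyFeatures()));
boolean enabled = featureService.clusterHasFeature(clusterState, MyFeatures.MY_FEATURE);

diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index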
4e76b2cad6608..bad6702e8ad83 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.gateway.PersistedClusterStateService.Writer; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.ESTestCase; @@ -1489,7 +1490,7 @@ public void testOldestIndexVersionIsCorrectlySerialized() throws IOException { final Path[] dataPaths2 = createDataPaths(); final Path[] combinedPaths = Stream.concat(Arrays.stream(dataPaths1), Arrays.stream(dataPaths2)).toArray(Path[]::new); - IndexVersion oldVersion = IndexVersion.fromId(IndexVersion.MINIMUM_COMPATIBLE.id() - 1); + IndexVersion oldVersion = IndexVersion.fromId(IndexVersions.MINIMUM_COMPATIBLE.id() - 1); final IndexVersion[] indexVersions = new IndexVersion[] { oldVersion, @@ -1686,7 +1687,7 @@ public void testFailsIfMappingIsMissing() throws IOException { final String message = expectThrows(CorruptStateException.class, () -> persistedClusterStateService.loadBestOnDiskState()) .getCause() .getMessage(); - assertEquals("java.lang.IllegalArgumentException: mapping with hash [" + hash + "] not found", message); + assertEquals("java.lang.IllegalArgumentException: mapping of index [test-1] with hash [" + hash + "] not found", message); } } diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index f4b1c867b9b21..06e83b8051bb6 100644 --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.RetentionLease; @@ -66,7 +67,7 @@ import static org.hamcrest.Matchers.hasSize; public class ReplicaShardAllocatorTests extends ESAllocationTestCase { - private static final String MIN_SUPPORTED_LUCENE_VERSION = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion().toString(); + private static final String MIN_SUPPORTED_LUCENE_VERSION = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion().toString(); private final ShardId shardId = new ShardId("test", "_na_", 0); private final DiscoveryNode node1 = newNode("node1"); diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 63cdc2c485197..409023afc4576 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -692,7 +692,7 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBody co } return new TestHttpResponse(status, bso.bytes()); } catch (IOException e) { - throw new AssertionError("unexpected", e); + return fail(e); } } }; diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java 
b/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index b6700f0cf775f..3d95152c88cc5 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -385,7 +385,7 @@ public void testStatelessMinRefreshInterval() { .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) .put(EXISTING_SHARDS_ALLOCATOR_SETTING.getKey(), "stateless") .put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), "2s") - .put(SETTING_INDEX_VERSION_CREATED.getKey(), IndexVersion.V_8_10_0.id() + 1) + .put(SETTING_INDEX_VERSION_CREATED.getKey(), IndexVersions.V_8_10_0.id() + 1) .build() ); final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndexSettings(metadata, Settings.EMPTY)); diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 92e080d85c0c2..d2b2926af7d4c 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -161,7 +161,7 @@ public void testSortingAgainstAliases() { public void testSortingAgainstAliasesPre713() { IndexSettings indexSettings = indexSettings( - Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.V_7_12_0).put("index.sort.field", "field").build() + Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_7_12_0).put("index.sort.field", "field").build() ); MappedFieldType aliased = new KeywordFieldMapper.KeywordFieldType("aliased"); Sort sort = buildIndexSort(indexSettings, Map.of("field", aliased)); diff --git a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java index 60eb7313c71d7..e13ce9702cdfd 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexVersionTests.java @@ -32,8 +32,8 @@ public class IndexVersionTests extends ESTestCase { public void testVersionComparison() { - IndexVersion V_7_2_0 = IndexVersion.V_7_2_0; - IndexVersion V_8_0_0 = IndexVersion.V_8_0_0; + IndexVersion V_7_2_0 = IndexVersions.V_7_2_0; + IndexVersion V_8_0_0 = IndexVersions.V_8_0_0; assertThat(V_7_2_0.before(V_8_0_0), is(true)); assertThat(V_7_2_0.before(V_7_2_0), is(false)); assertThat(V_8_0_0.before(V_7_2_0), is(false)); @@ -70,7 +70,7 @@ public static class DuplicatedIdFakeVersion { public void testStaticIndexVersionChecks() { assertThat( - IndexVersion.getAllVersionIds(IndexVersionTests.CorrectFakeVersion.class), + IndexVersions.getAllVersionIds(IndexVersionTests.CorrectFakeVersion.class), equalTo( Map.of( 199, @@ -84,7 +84,7 @@ public void testStaticIndexVersionChecks() { ) ) ); - AssertionError e = expectThrows(AssertionError.class, () -> IndexVersion.getAllVersionIds(DuplicatedIdFakeVersion.class)); + AssertionError e = expectThrows(AssertionError.class, () -> IndexVersions.getAllVersionIds(DuplicatedIdFakeVersion.class)); assertThat(e.getMessage(), containsString("have the same version number")); } @@ -154,7 +154,7 @@ public void testMinimumCompatibleVersion() { } public void testVersionConstantPresent() { - Set ignore = Set.of(IndexVersion.ZERO, IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE); + Set ignore = Set.of(IndexVersions.ZERO, IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE); 
assertThat(IndexVersion.current(), sameInstance(IndexVersion.fromId(IndexVersion.current().id()))); assertThat(IndexVersion.current().luceneVersion(), equalTo(org.apache.lucene.util.Version.LATEST)); final int iters = scaledRandomIntBetween(20, 100); @@ -167,7 +167,7 @@ public void testVersionConstantPresent() { } public void testCURRENTIsLatest() { - assertThat(Collections.max(IndexVersion.getAllVersions()), is(IndexVersion.current())); + assertThat(Collections.max(IndexVersions.getAllVersions()), is(IndexVersion.current())); } public void testToString() { diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 0018c9cf1d7da..fa55d24bdbc48 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.analysis.PreBuiltAnalyzers; @@ -51,7 +52,7 @@ public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() { public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { assertThat( PreBuiltAnalyzers.KEYWORD.getAnalyzer(IndexVersion.current()), - is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(IndexVersion.MINIMUM_COMPATIBLE)) + is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(IndexVersions.MINIMUM_COMPATIBLE)) ); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index aed83cf8abd95..072851789f2e0 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -99,6 +99,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; @@ -6677,7 +6678,7 @@ public long softUpdateDocuments(Term term, Iterable getCommitExtraUserData() { - return Map.of("userkey", "userdata", ES_VERSION, IndexVersion.ZERO.toString()); + return Map.of("userkey", "userdata", ES_VERSION, IndexVersions.ZERO.toString()); } }; engine.skipTranslogRecovery(); @@ -7502,14 +7503,14 @@ public void testTrimUnsafeCommitHasESVersionInUserData() throws IOException { .setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE); try (IndexWriter indexWriter = new IndexWriter(store.directory(), indexWriterConfig)) { Map commitUserDataWithOlderVersion = new HashMap<>(committedSegmentsInfo.userData); - commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersion.V_7_0_0.toString()); + commitUserDataWithOlderVersion.put(ES_VERSION, IndexVersions.V_7_0_0.toString()); indexWriter.setLiveCommitData(commitUserDataWithOlderVersion.entrySet()); indexWriter.commit(); } Map userDataBeforeTrimUnsafeCommits = store.readLastCommittedSegmentsInfo().getUserData(); assertThat(userDataBeforeTrimUnsafeCommits, hasKey(ES_VERSION)); - 
assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersion.V_7_0_0.toString()))); + assertThat(userDataBeforeTrimUnsafeCommits.get(ES_VERSION), is(equalTo(IndexVersions.V_7_0_0.toString()))); store.trimUnsafeCommits(config.getTranslogConfig().getTranslogPath()); diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 5c35733feedef..e79b088893acd 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -29,7 +29,6 @@ public void testQueryRewriteOnRefresh() throws Exception { indicesAdmin().prepareCreate("index") .setMapping("s", "type=text") .setSettings(indexSettings(1, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); // Index some documents @@ -48,7 +47,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) .get(); - assertSearchResponse(r1); + assertNoFailures(r1); assertThat(r1.getHits().getTotalHits().value, equalTo(3L)); assertRequestCacheStats(0, 1); @@ -58,7 +57,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) .get(); - assertSearchResponse(r2); + assertNoFailures(r2); assertThat(r2.getHits().getTotalHits().value, equalTo(3L)); assertRequestCacheStats(1, 1); @@ -73,7 +72,7 @@ public void testQueryRewriteOnRefresh() throws Exception { .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")) .get(); - assertSearchResponse(r3); + assertNoFailures(r3); assertThat(r3.getHits().getTotalHits().value, equalTo(5L)); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index e8c2db2ab2616..70e2fee7a003a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -22,6 +22,7 @@ import java.io.IOException; import java.util.List; +import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; @@ -262,6 +263,12 @@ public List invalidExample() throws IOException { }; } + @Override + protected Function loadBlockExpected() { + // Just assert that we expect a boolean. Otherwise no munging. 
+ return v -> (Boolean) v; + } + protected IngestScriptSupport ingestScriptSupport() { return new IngestScriptSupport() { @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 3df28170938e1..8d5a47f08c663 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -410,6 +410,19 @@ public XContentParser parser() { } } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [false]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [true]}")))); + try (DirectoryReader reader = iw.getReader()) { + BooleanScriptFieldType fieldType = build("xor_param", Map.of("param", false), OnScriptError.FAIL); + List expected = List.of(false, true); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(expected)); + assertThat(blockLoaderReadValuesFromSingleDoc(reader, fieldType), equalTo(expected)); + } + } + } + private void assertSameCount(IndexSearcher searcher, String source, Object queryDescription, Query scriptedQuery, Query ootbQuery) throws IOException { assertThat( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java index d66d927e8e0d0..829e2fcbe79d6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ContentPathTests.java @@ -23,7 +23,10 @@ public void testAddPath() { public void testRemovePath() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); + String[] path = contentPath.getPath(); + assertEquals("foo", path[0]); contentPath.remove(); + assertNull(path[0]); assertEquals(0, contentPath.length()); String pathAsText = contentPath.pathAsText("bar"); assertEquals("bar", pathAsText); @@ -33,21 +36,7 @@ public void testRemovePathException() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); contentPath.remove(); - expectThrows(IllegalStateException.class, contentPath::remove); - } - - public void testRootPath() { - ContentPath contentPath = new ContentPath(); - assertEquals("root", contentPath.pathAsText("root")); - assertEquals(0, contentPath.length()); - } - - public void testNestedPath() { - ContentPath contentPath = new ContentPath(); - contentPath.add("root"); - contentPath.add("inner"); - assertEquals("root.inner.leaf1", contentPath.pathAsText("leaf1")); - assertEquals("root.inner.leaf2", contentPath.pathAsText("leaf2")); + expectThrows(IndexOutOfBoundsException.class, contentPath::remove); } public void testBehaviourWithLongerPath() { @@ -93,15 +82,6 @@ public void testPathAsText() { assertEquals("foo.bar.baz", contentPath.pathAsText("baz")); } - public void testPathTextAfterLeafRemoval() { - ContentPath contentPath = new ContentPath(); - contentPath.add("root"); - contentPath.add("inner"); - contentPath.add("leaf"); - contentPath.remove(); - assertEquals("root.inner.newLeaf", contentPath.pathAsText("newLeaf")); - } - public void testPathAsTextAfterRemove() { ContentPath contentPath = new ContentPath(); contentPath.add("foo"); @@ -120,27 +100,4 @@ public 
void testPathAsTextAfterRemoveAndMoreAdd() { contentPath.add("baz"); assertEquals("foo.baz.qux", contentPath.pathAsText("qux")); } - - public void testPathTextAfterRootRemovalAndNewPathAdded() { - ContentPath contentPath = new ContentPath(); - contentPath.add("root"); - contentPath.add("inner"); - contentPath.add("leaf"); - contentPath.remove(); - contentPath.remove(); - contentPath.remove(); - contentPath.add("newRoot"); - contentPath.add("newInner"); - assertEquals("newRoot.newInner.newLeaf", contentPath.pathAsText("newLeaf")); - } - - public void testPathTextRemovalAfterPathAsTextHasBeenCalled() { - ContentPath contentPath = new ContentPath(); - contentPath.add("root"); - contentPath.add("inner"); - contentPath.pathAsText("leaf"); - contentPath.remove(); - contentPath.add("newInner"); - assertEquals("root.newInner.newLeaf", contentPath.pathAsText("newLeaf")); - } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index 091412826b79e..9e9437aa6b9db 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; import org.elasticsearch.script.DateFieldScript; import org.elasticsearch.script.ScriptService; @@ -33,6 +34,7 @@ import java.util.ArrayList; import java.util.Comparator; import java.util.List; +import java.util.function.Function; import static org.elasticsearch.index.mapper.DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER; import static org.hamcrest.Matchers.containsString; @@ -230,7 +232,7 @@ public void testBadNullValue() throws IOException { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> createDocumentMapper(IndexVersion.V_8_0_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))) + () -> createDocumentMapper(IndexVersions.V_8_0_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))) ); assertThat( @@ -241,7 +243,7 @@ public void testBadNullValue() throws IOException { ) ); - createDocumentMapper(IndexVersion.V_7_9_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))); + createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> b.field("type", "date").field("null_value", "foo"))); assertWarnings("Error parsing [foo] as date in [null_value] on field [field]); [null_value] will be ignored"); } @@ -576,7 +578,12 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) public SyntheticSourceExample example(int maxValues) { if (randomBoolean()) { Tuple v = generateValue(); - return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + return new SyntheticSourceExample( + v.v1(), + v.v2(), + resolution.convert(Instant.from(formatter.parse(v.v2()))), + this::mapping + ); } List> values = randomList(1, maxValues, this::generateValue); List in = values.stream().map(Tuple::v1).toList(); @@ -587,7 +594,10 @@ public SyntheticSourceExample example(int maxValues) { .map(Tuple::v2) .toList(); Object out = outList.size() == 1 ? 
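// Editor's note on the ContentPath hunks above (illustrative sketch, not part of the change):
// remove() now clears the popped slot in the backing array and no longer guards the empty case
// itself, so a surplus remove() surfaces as IndexOutOfBoundsException rather than the
// previous, deliberately thrown IllegalStateException.
ContentPath contentPath = new ContentPath();
contentPath.add("foo");
contentPath.remove();        // fine: slot nulled out, length back to 0
// contentPath.remove();     // would now throw IndexOutOfBoundsException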
outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + + List outBlockList = outList.stream().map(v -> resolution.convert(Instant.from(formatter.parse(v)))).toList(); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(in, out, outBlock, this::mapping); } private Tuple generateValue() { @@ -695,6 +705,11 @@ public void execute() { }; } + @Override + protected Function loadBlockExpected() { + return v -> ((Number) v).longValue(); + } + public void testLegacyField() throws Exception { // check that unknown date formats are treated leniently on old indices MapperService service = createMapperService(IndexVersion.fromId(5000099), Settings.EMPTY, () -> false, mapping(b -> { @@ -727,8 +742,8 @@ public void testLegacyDateFormatName() { // BWC compatible index, e.g 7.x IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java index cf28b46118d65..d1652b9f57716 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DateScriptFieldTypeTests.java @@ -471,6 +471,18 @@ public void testLegacyDateFormatName() throws IOException { ); } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181355]}")))); + try (DirectoryReader reader = iw.getReader()) { + DateScriptFieldType fieldType = build("add_days", Map.of("days", 1), OnScriptError.FAIL); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(List.of(1595518581354L, 1595518581355L))); + assertThat(blockLoaderReadValuesFromSingleDoc(reader, fieldType), equalTo(List.of(1595518581354L, 1595518581355L))); + } + } + } + @Override protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { return ft.termsQuery(randomList(1, 100, DateScriptFieldTypeTests::randomDate), ctx); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java index f9fcbebe221d4..c55ffaaa70a16 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java @@ -15,6 +15,9 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; public class DotExpandingXContentParserTests extends ESTestCase { @@ -348,4 +351,94 @@ public void testGetTokenLocation() throws IOException { assertNull(dotExpandedParser.nextToken()); assertNull(expectedParser.nextToken()); } + + public void testParseMapUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + 
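// Editor's note (sketch): judging by the call sites in this diff -- the four-argument
// constructor calls above and delegate.blockLoaderResult() in TextFieldMapperTests below --
// SyntheticSourceExample appears to have grown a third component carrying the value the
// block loader is expected to produce, which may differ from the synthetic _source result
// (dates, for instance, round-trip as formatted strings in _source but as epoch millis in
// blocks). A hypothetical shape:
// record SyntheticSourceExample(Object inputValue, Object result, Object blockLoaderResult,
//     CheckedConsumer<XContentBuilder, IOException> mapping) { ... }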
expectThrows(UnsupportedOperationException.class, dotExpandedParser::map); + } + + public void testParseMapOrderedUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + expectThrows(UnsupportedOperationException.class, dotExpandedParser::mapOrdered); + } + + public void testParseMapStringsUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + expectThrows(UnsupportedOperationException.class, dotExpandedParser::mapStrings); + } + + public void testParseMapSupplierUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + expectThrows(UnsupportedOperationException.class, () -> dotExpandedParser.map(HashMap::new, XContentParser::text)); + } + + public void testParseMap() throws Exception { + String jsonInput = """ + {"params":{"one":"one", + "two":"two"}}\ + """; + + ContentPath contentPath = new ContentPath(); + contentPath.setWithinLeafObject(true); + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, jsonInput), + contentPath + ); + assertEquals(XContentParser.Token.START_OBJECT, dotExpandedParser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, dotExpandedParser.nextToken()); + assertEquals("params", dotExpandedParser.currentName()); + assertEquals(XContentParser.Token.START_OBJECT, dotExpandedParser.nextToken()); + Map<String, Object> map = dotExpandedParser.map(); + assertEquals(2, map.size()); + assertEquals("one", map.get("one")); + assertEquals("two", map.get("two")); + } + + public void testParseListUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + expectThrows(UnsupportedOperationException.class, dotExpandedParser::list); + } + + public void testParseListOrderedUOE() throws Exception { + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, ""), + new ContentPath() + ); + expectThrows(UnsupportedOperationException.class, dotExpandedParser::listOrderedMap); + } + + public void testParseList() throws Exception { + String jsonInput = """ + {"params":["one","two"]}\ + """; + + ContentPath contentPath = new ContentPath(); + contentPath.setWithinLeafObject(true); + XContentParser dotExpandedParser = DotExpandingXContentParser.expandDots( + createParser(JsonXContent.jsonXContent, jsonInput), + contentPath + ); + assertEquals(XContentParser.Token.START_OBJECT, dotExpandedParser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, dotExpandedParser.nextToken()); + assertEquals("params", dotExpandedParser.currentName()); + List<Object> list = dotExpandedParser.list(); + assertEquals(2, list.size()); + assertEquals("one", list.get(0)); + assertEquals("two", list.get(1)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index d360dc9796d89..0f05dad8098f4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -230,6 +230,18 @@
public void testTermsQuery() throws IOException { } } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + try (DirectoryReader reader = iw.getReader()) { + DoubleScriptFieldType fieldType = build("add_param", Map.of("param", 1), OnScriptError.FAIL); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(List.of(2d, 3d))); + assertThat(blockLoaderReadValuesFromSingleDoc(reader, fieldType), equalTo(List.of(2d, 3d))); + } + } + } + @Override protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { return ft.termsQuery(List.of(randomLong()), ctx); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index 6c2a02df1db24..54db5832c2726 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -194,8 +195,8 @@ public void testDynamicMapperWithBadMapping() throws IOException { // in 7.x versions this will issue a deprecation warning IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); DocumentMapper mapper = createDocumentMapper(version, topMapping(b -> { b.startArray("dynamic_templates"); @@ -667,7 +668,7 @@ public void testIllegalDynamicTemplate7DotXIndex() throws Exception { mapping.endObject(); } mapping.endObject(); - IndexVersion createdVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_7_0); + IndexVersion createdVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); MapperService mapperService = createMapperService(createdVersion, mapping); assertThat(mapperService.documentMapper().mappingSource().toString(), containsString("\"type\":\"string\"")); assertWarnings(""" diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index e08bb1bf30a09..0b5fef2b5971c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.termvectors.TermVectorsService; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentFactory; @@ -86,7 +86,7 @@ public void 
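// Editor's sketch of the testBlockLoader pattern that recurs through the script-field tests
// in this change (blockLoaderReadValues / blockLoaderReadValuesFromSingleDoc are the helper
// names the diff itself uses; the field type and document values are illustrative):
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
    // documents carry only _source; the runtime field computes its values at read time
    iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
    iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
    try (DirectoryReader reader = iw.getReader()) {
        // the bulk path and the single-document path of the block loader must agree
        assertThat(blockLoaderReadValues(reader, fieldType), equalTo(blockLoaderReadValuesFromSingleDoc(reader, fieldType)));
    }
}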
testUsingEnabledSettingThrows() { public void testUsingEnabledBefore8() throws Exception { DocumentMapper docMapper = createDocumentMapper( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0), + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject()) ); @@ -103,7 +103,7 @@ public void testUsingEnabledBefore8() throws Exception { */ public void testMergingMappingsBefore8() throws Exception { MapperService mapperService = createMapperService( - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0), + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), mapping(b -> {}) ); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java index 36bb70210a439..3798129ccff29 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FloatFieldMapperTests.java @@ -14,6 +14,7 @@ import java.io.IOException; import java.util.List; +import java.util.function.Function; public class FloatFieldMapperTests extends NumberFieldMapperTests { @@ -54,6 +55,15 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) return new NumberSyntheticSourceSupport(Number::floatValue, ignoreMalformed); } + @Override + protected Function<Object, Object> loadBlockExpected() { + return v -> { + // The test converts the float into a string, so we do too + Number n = (Number) v; + return Double.parseDouble(Float.toString(n.floatValue())); + }; + } + @Override protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java index 13b6644520745..cc024efb5f307 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/HalfFloatFieldMapperTests.java @@ -16,6 +16,7 @@ import java.io.IOException; import java.util.List; +import java.util.function.Function; public class HalfFloatFieldMapperTests extends NumberFieldMapperTests { @@ -54,6 +55,17 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) ); } + @Override + protected Function<Object, Object> loadBlockExpected() { + return v -> { + // The test converts the float into a string, so we do too + Number n = (Number) v; + return Double.parseDouble( + Float.toString(HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(n.floatValue()))) + ); + }; + } + @Override protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index e53edc118b43f..ba9c2e6c4a299 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import
org.elasticsearch.index.IndexVersions; import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; @@ -27,6 +28,7 @@ import java.net.InetAddress; import java.util.ArrayList; import java.util.List; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -203,7 +205,7 @@ public void testNullValue() throws IOException { "Failed to parse mapping: Error parsing [null_value] on field [field]: ':1' is not an IP string literal." ); - createDocumentMapper(IndexVersion.V_7_9_0, fieldMapping(b -> { + createDocumentMapper(IndexVersions.V_7_9_0, fieldMapping(b -> { b.field("type", "ip"); b.field("null_value", ":1"); })); @@ -385,6 +387,7 @@ public List invalidExample() throws IOException { } } + @Override protected IngestScriptSupport ingestScriptSupport() { return new IngestScriptSupport() { @Override @@ -418,4 +421,9 @@ public void execute() { } }; } + + @Override + protected Function loadBlockExpected() { + return v -> InetAddresses.toAddrString(InetAddressPoint.decode(BytesRef.deepCopyOf((BytesRef) v).bytes)); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index 26e1763a12c21..56ca5f3dae89f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; @@ -25,6 +26,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; +import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.fielddata.BinaryScriptFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues.Strings; @@ -244,6 +246,22 @@ public void testTermsQuery() throws IOException { } } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.0\"]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [\"192.168.1\"]}")))); + try (DirectoryReader reader = iw.getReader()) { + IpScriptFieldType fieldType = build("append_param", Map.of("param", ".1"), OnScriptError.FAIL); + List expected = List.of( + new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), + new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.1.1"))) + ); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(expected)); + assertThat(blockLoaderReadValuesFromSingleDoc(reader, fieldType), equalTo(expected)); + } + } + } + @Override protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { return ft.termsQuery(randomList(100, () -> randomIp(randomBoolean())), ctx); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 
94e2506d2b2a7..eafb33cd44cd4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -50,6 +50,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.stream.Collectors; import static java.util.Collections.singletonList; @@ -648,6 +649,11 @@ protected boolean supportsIgnoreMalformed() { return false; } + @Override + protected Function loadBlockExpected() { + return v -> ((BytesRef) v).utf8ToString(); + } + @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { assertFalse("keyword doesn't support ignore_malformed", ignoreMalformed); @@ -658,6 +664,7 @@ static class KeywordSyntheticSourceSupport implements SyntheticSourceSupport { private final Integer ignoreAbove = randomBoolean() ? null : between(10, 100); private final boolean allIgnored = ignoreAbove != null && rarely(); private final boolean store; + private final boolean docValues; private final String nullValue; private final boolean exampleSortsUsingIgnoreAbove; @@ -665,13 +672,18 @@ static class KeywordSyntheticSourceSupport implements SyntheticSourceSupport { this.store = store; this.nullValue = nullValue; this.exampleSortsUsingIgnoreAbove = exampleSortsUsingIgnoreAbove; + this.docValues = store ? randomBoolean() : true; } @Override public SyntheticSourceExample example(int maxValues) { if (randomBoolean()) { Tuple v = generateValue(); - return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + Object loadBlock = v.v2(); + if (ignoreAbove != null && v.v2().length() > ignoreAbove) { + loadBlock = null; + } + return new SyntheticSourceExample(v.v1(), v.v2(), loadBlock, this::mapping); } List> values = randomList(1, maxValues, this::generateValue); List in = values.stream().map(Tuple::v1).toList(); @@ -685,9 +697,13 @@ public SyntheticSourceExample example(int maxValues) { } }); List outList = store ? outPrimary : new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList()); + List loadBlock = docValues + ? new HashSet<>(outPrimary).stream().sorted().collect(Collectors.toList()) + : List.copyOf(outList); + Object loadBlockResult = loadBlock.size() == 1 ? loadBlock.get(0) : loadBlock; outList.addAll(outExtraValues); Object out = outList.size() == 1 ? 
outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + return new SyntheticSourceExample(in, out, loadBlockResult, this::mapping); } private Tuple generateValue() { @@ -712,9 +728,9 @@ private void mapping(XContentBuilder b) throws IOException { } if (store) { b.field("store", true); - if (randomBoolean()) { - b.field("doc_values", false); - } + } + if (docValues == false) { + b.field("doc_values", false); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index c319ac51803db..65f4c2e3ea6eb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -376,6 +376,21 @@ public void testMatchQuery() throws IOException { } } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + try (DirectoryReader reader = iw.getReader()) { + KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-Suffix"), OnScriptError.FAIL); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(List.of(new BytesRef("1-Suffix"), new BytesRef("2-Suffix")))); + assertThat( + blockLoaderReadValuesFromSingleDoc(reader, fieldType), + equalTo(List.of(new BytesRef("1-Suffix"), new BytesRef("2-Suffix"))) + ); + } + } + } + @Override protected KeywordScriptFieldType simpleMappedFieldType() { return build("read_foo", Map.of(), OnScriptError.FAIL); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java index ad34c407d0678..f2d4431e5c79f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongFieldMapperTests.java @@ -19,6 +19,7 @@ import java.io.IOException; import java.math.BigInteger; import java.util.List; +import java.util.function.Function; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; @@ -120,6 +121,17 @@ public void testFetchCoerced() throws IOException { assertFetch(randomFetchTestMapper(), "field", 3.783147882954537E18, randomFetchTestFormat()); } + @Override + protected Function loadBlockExpected() { + return n -> { + Number number = ((Number) n); + if (Integer.MIN_VALUE <= number.longValue() && number.longValue() <= Integer.MAX_VALUE) { + return number.intValue(); + } + return number.longValue(); + }; + } + protected IngestScriptSupport ingestScriptSupport() { return new IngestScriptSupport() { @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 20ae732f9c5b0..1688cab24af3e 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -263,6 +263,18 @@ public void testTermsQuery() throws IOException { } } + public void testBlockLoader() throws IOException { + try (Directory directory = newDirectory(); 
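// Editor's note on the Long loadBlockExpected() override above (sketch): values read back
// from JSON come out as Integer when they are small enough, so the expected-value hook
// appears to normalize any long that fits into an int before comparison; roughly:
static Object expectedBlockValue(Number n) {
    long v = n.longValue();
    return (Integer.MIN_VALUE <= v && v <= Integer.MAX_VALUE) ? (Object) (int) v : (Object) v;
}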
RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}")))); + iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}")))); + try (DirectoryReader reader = iw.getReader()) { + LongScriptFieldType fieldType = build("add_param", Map.of("param", 1), OnScriptError.FAIL); + assertThat(blockLoaderReadValues(reader, fieldType), equalTo(List.of(2L, 3L))); + assertThat(blockLoaderReadValuesFromSingleDoc(reader, fieldType), equalTo(List.of(2L, 3L))); + } + } + } + @Override protected Query randomTermsQuery(MappedFieldType ft, SearchExecutionContext ctx) { return ft.termsQuery(List.of(randomLong()), ctx); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 69b44d383193a..aa3b083f4496f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.index.IndexVersionUtils; @@ -271,7 +272,7 @@ public void testIsMetadataField() throws IOException { assertFalse(mapperService.isMetadataField(randomAlphaOfLengthBetween(10, 15))); for (String builtIn : IndicesModule.getBuiltInMetadataFields()) { - if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersion.V_8_0_0)) { + if (NestedPathFieldMapper.NAME.equals(builtIn) && version.before(IndexVersions.V_8_0_0)) { continue; // Nested field does not exist in the 7x line } assertTrue("Expected " + builtIn + " to be a metadata field for version " + version, mapperService.isMetadataField(builtIn)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java index ddb81727fd399..abe8e820acae8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/MappingParserTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.IndicesModule; @@ -317,7 +318,7 @@ public void testBlankFieldName() throws Exception { } public void testBlankFieldNameBefore8_6_0() throws Exception { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.V_8_5_0); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersions.V_8_5_0); TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.MINIMUM_COMPATIBLE, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 
82dcf46960008..621f03813b1ff 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.ObjectMapper.Dynamic; import org.elasticsearch.test.index.IndexVersionUtils; @@ -1006,7 +1007,7 @@ public void testReorderParent() throws IOException { assertThat(doc.docs().size(), equalTo(3)); NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper; - if (version.before(IndexVersion.V_8_0_0)) { + if (version.before(IndexVersions.V_8_0_0)) { assertThat(doc.docs().get(0).get("_type"), equalTo(nested1Mapper.nestedTypePath())); } else { assertThat(doc.docs().get(0).get(NestedPathFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePath())); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedPathFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedPathFieldMapperTests.java index 4338fed4aed08..e652d1e2e85cc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedPathFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedPathFieldMapperTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; @@ -25,7 +26,7 @@ protected String fieldName() { @Override protected boolean isSupportedOn(IndexVersion version) { - return version.onOrAfter(IndexVersion.V_8_0_0); + return version.onOrAfter(IndexVersions.V_8_0_0); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 45a1ac2ced32d..7b91c84a05c53 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.script.ScriptFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; +import org.hamcrest.Matcher; import java.io.IOException; import java.util.ArrayList; @@ -37,6 +38,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.matchesPattern; +import static org.hamcrest.Matchers.notANumber; public abstract class NumberFieldMapperTests extends MapperTestCase { @@ -379,6 +381,16 @@ public void testAllowMultipleValuesField() throws IOException { assertThat(e.getCause().getMessage(), containsString("Only one field can be stored per key")); } + @Override + protected Function loadBlockExpected() { + return n -> ((Number) n); // Just assert it's a number + } + + @Override + protected Matcher blockItemMatcher(Object expected) { + return "NaN".equals(expected) ? 
notANumber() : equalTo(expected); + } + protected abstract Number randomNumber(); protected final class NumberSyntheticSourceSupport implements SyntheticSourceSupport { @@ -398,10 +410,11 @@ public SyntheticSourceExample example(int maxVals) { if (randomBoolean()) { Tuple v = generateValue(); if (v.v2() instanceof Number n) { - return new SyntheticSourceExample(v.v1(), round.apply(n), this::mapping); + Number result = round.apply(n); + return new SyntheticSourceExample(v.v1(), result, result, this::mapping); } // ignore_malformed value - return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + return new SyntheticSourceExample(v.v1(), v.v2(), List.of(), this::mapping); } List> values = randomList(1, maxVals, this::generateValue); List in = values.stream().map(Tuple::v1).toList(); @@ -412,7 +425,14 @@ public SyntheticSourceExample example(int maxVals) { .collect(Collectors.toCollection(ArrayList::new)); values.stream().filter(v -> false == v.v2() instanceof Number).map(v -> v.v2()).forEach(outList::add); Object out = outList.size() == 1 ? outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + + List outBlockList = values.stream() + .filter(v -> v.v2() instanceof Number) + .map(t -> round.apply((Number) t.v2())) + .sorted() + .collect(Collectors.toCollection(ArrayList::new)); + Object outBlock = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; + return new SyntheticSourceExample(in, out, outBlock, this::mapping); } private Tuple generateValue() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 191822ad07cfe..9b447d0727152 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -531,7 +532,7 @@ public void testBWCunknownParametersfromDynamicTemplates() { {"type":"test_mapper","some_unknown_parameter":true,"required":"value"}"""; TestMapper mapper = fromMapping( mapping, - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0), + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_7_0_0, @@ -549,7 +550,7 @@ public void testBWCunknownParametersfromDynamicTemplates() { MapperParsingException ex = expectThrows( MapperParsingException.class, - () -> fromMapping(mapping, IndexVersion.V_8_0_0, TransportVersions.V_8_0_0, true) + () -> fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0, true) ); assertEquals("unknown parameter [some_unknown_parameter] on mapper [field] of type [test_mapper]", ex.getMessage()); } @@ -586,7 +587,7 @@ public void testDeprecatedParameters() { // 'index' is declared explicitly, 'store' is not, but is one of the previously always-accepted params String mapping = """ {"type":"test_mapper","index":false,"store":true,"required":"value"}"""; - TestMapper mapper = fromMapping(mapping, IndexVersion.V_7_8_0, TransportVersions.V_7_8_0); + 
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java index 191822ad07cfe..9b447d0727152 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ParametrizedMapperTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -531,7 +532,7 @@ public void testBWCunknownParametersfromDynamicTemplates() { {"type":"test_mapper","some_unknown_parameter":true,"required":"value"}"""; TestMapper mapper = fromMapping( mapping, - IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0), + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_7_0_0, @@ -549,7 +550,7 @@ public void testBWCunknownParametersfromDynamicTemplates() { MapperParsingException ex = expectThrows( MapperParsingException.class, - () -> fromMapping(mapping, IndexVersion.V_8_0_0, TransportVersions.V_8_0_0, true) + () -> fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0, true) ); assertEquals("unknown parameter [some_unknown_parameter] on mapper [field] of type [test_mapper]", ex.getMessage()); } @@ -586,7 +587,7 @@ public void testDeprecatedParameters() { // 'index' is declared explicitly, 'store' is not, but is one of the previously always-accepted params String mapping = """ {"type":"test_mapper","index":false,"store":true,"required":"value"}"""; - TestMapper mapper = fromMapping(mapping, IndexVersion.V_7_8_0, TransportVersions.V_7_8_0); + TestMapper mapper = fromMapping(mapping, IndexVersions.V_7_8_0, TransportVersions.V_7_8_0); assertWarnings("Parameter [store] has no effect on type [test_mapper] and will be removed in future"); assertFalse(mapper.index); assertEquals(""" @@ -594,7 +595,7 @@ public void testDeprecatedParameters() { MapperParsingException e = expectThrows( MapperParsingException.class, - () -> fromMapping(mapping, IndexVersion.V_8_0_0, TransportVersions.V_8_0_0) + () -> fromMapping(mapping, IndexVersions.V_8_0_0, TransportVersions.V_8_0_0) ); assertEquals("unknown parameter [store] on mapper [field] of type [test_mapper]", e.getMessage()); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 02259a24a5e94..bbfeaaa8b9d69 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -82,6 +82,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -1132,10 +1133,11 @@ public SyntheticSourceExample example(int maxValues) { return new SyntheticSourceExample( delegate.inputValue(), delegate.result(), + delegate.result(), b -> b.field("type", "text").field("store", true) ); } - return new SyntheticSourceExample(delegate.inputValue(), delegate.result(), b -> { + return new SyntheticSourceExample(delegate.inputValue(), delegate.result(), delegate.blockLoaderResult(), b -> { b.field("type", "text"); b.startObject("fields"); { @@ -1181,6 +1183,11 @@ public List<SyntheticSourceInvalidExample> invalidExample() throws IOException { }; } + @Override + protected Function<Object, Object> loadBlockExpected() { + return v -> ((BytesRef) v).utf8ToString(); + } + @Override protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported");
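The TextFieldMapperTests override above highlights the representation the block loader works in: text values come back as Lucene BytesRef, so the expected-value function decodes UTF-8 bytes to a Java String before the comparison. A minimal sketch of the same conversion, assuming Lucene on the classpath:

    import org.apache.lucene.util.BytesRef;

    import java.util.function.Function;

    class TextBlockLoaderSketch {
        // Block-loaded text arrives as UTF-8 encoded bytes; decode before asserting.
        static final Function<Object, Object> LOAD_BLOCK_EXPECTED = v -> ((BytesRef) v).utf8ToString();

        public static void main(String[] args) {
            Object loaded = new BytesRef("hello");
            System.out.println(LOAD_BLOCK_EXPECTED.apply(loaded)); // prints "hello"
        }
    }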
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java index 5eab2951d9e2d..2b704a25e2232 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeParsersTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -85,7 +86,7 @@ public void testMultiFieldWithinMultiField() throws IOException { IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY); when(mapperService.getIndexSettings()).thenReturn(indexSettings); - IndexVersion olderVersion = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion olderVersion = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MappingParserContext olderContext = new MappingParserContext( null, type -> typeParser, @@ -111,7 +112,7 @@ public void testMultiFieldWithinMultiField() throws IOException { // For indices created in 8.0 or later, we should throw an error. Map<String, Object> fieldNodeCopy = XContentHelper.convertToMap(BytesReference.bytes(mapping), true, mapping.contentType()).v2(); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); TransportVersion transportVersion = TransportVersionUtils.randomVersionBetween( random(), TransportVersions.V_8_0_0, diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java index 709223d19788c..20d307a0d4cb1 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldSearchTests.java @@ -40,9 +40,9 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.cardinality; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -131,17 +131,12 @@ public void testQueryStringQuery() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("test").setQuery(queryStringQuery("flattened.field1:value")).get(); - assertSearchResponse(response); - assertHitCount(response, 1); - - response = client().prepareSearch("test").setQuery(queryStringQuery("flattened.field1:value AND flattened:2.718")).get(); - assertSearchResponse(response); - assertHitCount(response, 1); - - response = client().prepareSearch("test").setQuery(queryStringQuery("2.718").field("flattened.field2")).get(); - assertSearchResponse(response); - assertHitCount(response, 1); + assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(queryStringQuery("flattened.field1:value")), 1); + assertHitCountAndNoFailures( + client().prepareSearch("test").setQuery(queryStringQuery("flattened.field1:value AND flattened:2.718")), + 1 + ); + assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(queryStringQuery("2.718").field("flattened.field2")), 1); } public void testSimpleQueryStringQuery() throws Exception { @@ -159,17 +154,9 @@ public void testSimpleQueryStringQuery() throws Exception { ) .get(); - SearchResponse response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("flattened.field1")).get(); - assertSearchResponse(response); - assertHitCount(response, 1); - - response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("+value +2.718").field("flattened")).get(); - assertSearchResponse(response); - assertHitCount(response, 1); - - response = client().prepareSearch("test").setQuery(simpleQueryStringQuery("+value +3.141").field("flattened")).get(); - assertSearchResponse(response); - assertHitCount(response, 0); + 
assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(simpleQueryStringQuery("value").field("flattened.field1")), 1); + assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(simpleQueryStringQuery("+value +2.718").field("flattened")), 1); + assertHitCountAndNoFailures(client().prepareSearch("test").setQuery(simpleQueryStringQuery("+value +3.141").field("flattened")), 0); } public void testExists() throws Exception { @@ -226,7 +213,7 @@ public void testCardinalityAggregation() throws IOException { .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened")) .get(); - assertSearchResponse(response); + assertNoFailures(response); Cardinality count = response.getAggregations().get("cardinality"); assertCardinality(count, numDocs, precisionThreshold); @@ -234,7 +221,7 @@ public void testCardinalityAggregation() throws IOException { SearchResponse firstResponse = client().prepareSearch("test") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.first")) .get(); - assertSearchResponse(firstResponse); + assertNoFailures(firstResponse); Cardinality firstCount = firstResponse.getAggregations().get("cardinality"); assertCardinality(firstCount, numDocs, precisionThreshold); @@ -242,7 +229,7 @@ public void testCardinalityAggregation() throws IOException { SearchResponse secondResponse = client().prepareSearch("test") .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("flattened.second")) .get(); - assertSearchResponse(secondResponse); + assertNoFailures(secondResponse); Cardinality secondCount = secondResponse.getAggregations().get("cardinality"); assertCardinality(secondCount, (numDocs + 1) / 2, precisionThreshold); @@ -281,7 +268,7 @@ public void testTermsAggregation() throws IOException { // Aggregate on the root 'labels' field. TermsAggregationBuilder builder = createTermsAgg("labels"); SearchResponse response = client().prepareSearch("test").addAggregation(builder).get(); - assertSearchResponse(response); + assertNoFailures(response); Terms terms = response.getAggregations().get("terms"); assertThat(terms, notNullValue()); @@ -299,7 +286,7 @@ public void testTermsAggregation() throws IOException { // Aggregate on the 'priority' subfield. TermsAggregationBuilder priorityAgg = createTermsAgg("labels.priority"); SearchResponse priorityResponse = client().prepareSearch("test").addAggregation(priorityAgg).get(); - assertSearchResponse(priorityResponse); + assertNoFailures(priorityResponse); Terms priorityTerms = priorityResponse.getAggregations().get("terms"); assertThat(priorityTerms, notNullValue()); @@ -313,7 +300,7 @@ public void testTermsAggregation() throws IOException { // Aggregate on the 'release' subfield. TermsAggregationBuilder releaseAgg = createTermsAgg("labels.release"); SearchResponse releaseResponse = client().prepareSearch("test").addAggregation(releaseAgg).get(); - assertSearchResponse(releaseResponse); + assertNoFailures(releaseResponse); Terms releaseTerms = releaseResponse.getAggregations().get("terms"); assertThat(releaseTerms, notNullValue()); @@ -328,7 +315,7 @@ public void testTermsAggregation() throws IOException { // Aggregate on the 'priority' subfield with a min_doc_count of 0. 
TermsAggregationBuilder minDocCountAgg = createTermsAgg("labels.priority").minDocCount(0); SearchResponse minDocCountResponse = client().prepareSearch("test").addAggregation(minDocCountAgg).get(); - assertSearchResponse(minDocCountResponse); + assertNoFailures(minDocCountResponse); Terms minDocCountTerms = minDocCountResponse.getAggregations().get("terms"); assertThat(minDocCountTerms, notNullValue()); @@ -359,7 +346,7 @@ public void testLoadDocValuesFields() throws Exception { .get(); SearchResponse response = client().prepareSearch("test").addDocValueField("flattened").addDocValueField("flattened.key").get(); - assertSearchResponse(response); + assertNoFailures(response); assertHitCount(response, 1); Map<String, DocumentField> fields = response.getHits().getAt(0).getFields(); @@ -409,17 +396,17 @@ public void testFieldSort() throws Exception { .get(); SearchResponse response = client().prepareSearch("test").addSort("flattened", SortOrder.DESC).get(); - assertSearchResponse(response); + assertNoFailures(response); assertHitCount(response, 3); assertOrderedSearchHits(response, "3", "1", "2"); response = client().prepareSearch("test").addSort("flattened.key", SortOrder.DESC).get(); - assertSearchResponse(response); + assertNoFailures(response); assertHitCount(response, 3); assertOrderedSearchHits(response, "2", "1", "3"); response = client().prepareSearch("test").addSort(new FieldSortBuilder("flattened.key").order(SortOrder.DESC).missing("Z")).get(); - assertSearchResponse(response); + assertNoFailures(response); assertHitCount(response, 3); assertOrderedSearchHits(response, "3", "2", "1"); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java index 59ec920e891ee..ff5baf8ba0877 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/BinaryDenseVectorScriptDocValuesTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.script.field.vectors.BinaryDenseVectorDocValuesField; import org.elasticsearch.script.field.vectors.ByteBinaryDenseVectorDocValuesField; @@ -31,7 +32,7 @@ public void testFloatGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - for (IndexVersion indexVersion : List.of(IndexVersion.V_7_4_0, IndexVersion.current())) { + for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, IndexVersion.current())) { BinaryDocValues docValues = wrap(vectors, ElementType.FLOAT, indexVersion); DenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", ElementType.FLOAT, dims, indexVersion); DenseVectorScriptDocValues scriptDocValues = field.toScriptDocValues(); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java index 6d562f88a0100..b10d756a6e458 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapperTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.codec.PerFieldMapperCodec; import org.elasticsearch.index.mapper.DocumentMapper; @@ -62,7 +63,7 @@ public class DenseVectorFieldMapperTests extends MapperTestCase { - private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersion.V_8_10_0; + private static final IndexVersion INDEXED_BY_DEFAULT_PREVIOUS_INDEX_VERSION = IndexVersions.V_8_10_0; private final ElementType elementType; private final boolean indexed; private final boolean indexOptionsSet; @@ -591,7 +592,7 @@ public void testDefaultParamsIndexByDefault() throws Exception { } public void testAddDocumentsToIndexBefore_V_7_5_0() throws Exception { - IndexVersion indexVersion = IndexVersion.V_7_4_0; + IndexVersion indexVersion = IndexVersions.V_7_4_0; DocumentMapper mapper = createDocumentMapper( indexVersion, fieldMapping(b -> b.field("index", false).field("type", "dense_vector").field("dims", 3)) diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java index 5643d573867bc..79f6768512b85 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/SparseVectorFieldMapperTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MappedFieldType; @@ -240,7 +241,7 @@ public void testSparseVectorUnsupportedIndex() throws Exception { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), PREVIOUS_SPARSE_VECTOR_INDEX_VERSION, - IndexVersion.FIRST_DETACHED_INDEX_VERSION + IndexVersions.FIRST_DETACHED_INDEX_VERSION ); Exception e = expectThrows(MapperParsingException.class, () -> createMapperService(version, fieldMapping(b -> { b.field("type", "sparse_vector"); diff --git a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java index fe6a61060d39d..a003436fc0523 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/SimilarityServiceTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.lucene.similarity.LegacyBM25Similarity; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -73,7 +73,7 @@ public float score(float freq, long norm) { }; IllegalArgumentException e = expectThrows( IllegalArgumentException.class, - () -> 
SimilarityService.validateSimilarity(IndexVersion.V_7_0_0, negativeScoresSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, negativeScoresSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarities should not return negative scores")); @@ -98,7 +98,7 @@ public float score(float freq, long norm) { }; e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersion.V_7_0_0, decreasingScoresWithFreqSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, decreasingScoresWithFreqSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not decrease when term frequency increases")); @@ -123,7 +123,7 @@ public float score(float freq, long norm) { }; e = expectThrows( IllegalArgumentException.class, - () -> SimilarityService.validateSimilarity(IndexVersion.V_7_0_0, increasingScoresWithNormSim) + () -> SimilarityService.validateSimilarity(IndexVersions.V_7_0_0, increasingScoresWithNormSim) ); assertThat(e.getMessage(), Matchers.containsString("Similarity scores should not increase when norm increases")); } diff --git a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java index 1f72c7df8a9aa..b2b5bc7d9444b 100644 --- a/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java +++ b/server/src/test/java/org/elasticsearch/index/snapshots/blobstore/FileInfoTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; import org.elasticsearch.index.store.StoreFileMetadata; import org.elasticsearch.test.ESTestCase; @@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.is; public class FileInfoTests extends ESTestCase { - private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion(); + private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion(); public void testToFromXContent() throws IOException { final int iters = scaledRandomIntBetween(1, 10); diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 1c6314d60673d..dfbfb737c9ab2 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.ReplicationTracker; import org.elasticsearch.index.seqno.RetentionLease; @@ -111,7 +112,7 @@ public class StoreTests extends ESTestCase { "index", Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build() ); - private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion(); + private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion(); public void testRefCount() { final ShardId 
shardId = new ShardId("index", "_na_", 1); diff --git a/server/src/test/java/org/elasticsearch/index/store/VerifyingIndexOutputTests.java b/server/src/test/java/org/elasticsearch/index/store/VerifyingIndexOutputTests.java index 4cf9c261dc281..d10ee1a7938fc 100644 --- a/server/src/test/java/org/elasticsearch/index/store/VerifyingIndexOutputTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/VerifyingIndexOutputTests.java @@ -14,7 +14,7 @@ import org.apache.lucene.store.IOContext; import org.apache.lucene.util.Version; import org.elasticsearch.common.Numbers; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -28,7 +28,7 @@ public class VerifyingIndexOutputTests extends ESTestCase { private static final int CHECKSUM_LENGTH = 8; - private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion(); + private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion(); private static final Matcher<String> VERIFICATION_FAILURE = containsString("verification failed (hardware problem?)"); private static final Matcher<String> FOOTER_NOT_CHECKED = allOf(VERIFICATION_FAILURE, containsString("footer=")); private static final Matcher<String> INVALID_LENGTH = allOf(VERIFICATION_FAILURE, containsString("footer=")); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index e081f4ab96252..2ce85a598541e 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.indices; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DocCountFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; @@ -89,7 +90,7 @@ public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() { public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()); assertThat( module.getMapperRegistry().getMapperParser("object", IndexVersion.current()), instanceOf(ObjectMapper.TypeParser.class) @@ -106,8 +107,8 @@ public void testBuiltinMappers() { { IndexVersion version = IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_0_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_0_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ); assertEquals(EXPECTED_METADATA_FIELDS.length - 1, module.getMapperRegistry().getMetadataMapperParsers(version).size()); } @@ -225,14 +226,14 @@ public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() { public void testFieldNamesIsLast() { IndicesModule module = new IndicesModule(Collections.emptyList()); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()); List<String> fieldNames = new 
ArrayList<>(module.getMapperRegistry().getMetadataMapperParsers(version).keySet()); assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); } public void testFieldNamesIsLastWithPlugins() { IndicesModule module = new IndicesModule(fakePlugins); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()); List<String> fieldNames = new ArrayList<>(module.getMapperRegistry().getMetadataMapperParsers(version).keySet()); assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); } diff --git a/server/src/test/java/org/elasticsearch/indices/ShardLimitValidatorTests.java b/server/src/test/java/org/elasticsearch/indices/ShardLimitValidatorTests.java index 90f3cb8eacf68..e1b3bb4fe9c49 100644 --- a/server/src/test/java/org/elasticsearch/indices/ShardLimitValidatorTests.java +++ b/server/src/test/java/org/elasticsearch/indices/ShardLimitValidatorTests.java @@ -12,8 +12,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.shards.ShardCounts; @@ -25,8 +25,6 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; -import java.util.HashMap; -import java.util.Map; import java.util.Set; import static org.elasticsearch.cluster.metadata.MetadataIndexStateServiceTests.addClosedIndex; @@ -260,22 +258,13 @@ public static ClusterState createClusterForShardLimitTest( } public static DiscoveryNodes createDiscoveryNodes(int nodesInCluster, String group) { - Map<String, DiscoveryNode> dataNodes = new HashMap<>(); + DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (int i = 0; i < nodesInCluster; i++) { - dataNodes.put(randomAlphaOfLengthBetween(5, 15), createNode(group)); - } - DiscoveryNodes nodes = mock(DiscoveryNodes.class); - when(nodes.getDataNodes()).thenReturn(dataNodes); - return nodes; - } - - private static DiscoveryNode createNode(String group) { - DiscoveryNode mock = mock(DiscoveryNode.class); - if (ShardLimitValidator.FROZEN_GROUP.equals(group)) { - when(mock.getRoles()).thenReturn(randomBoolean() ? DiscoveryNodeRole.roles() : Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE)); - } else { - when(mock.getRoles()).thenReturn( - randomBoolean() + Set<DiscoveryNodeRole> roles; + if (ShardLimitValidator.FROZEN_GROUP.equals(group)) { + roles = randomBoolean() ? DiscoveryNodeRole.roles() : Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE); + } else { + roles = randomBoolean() ? DiscoveryNodeRole.roles() : Set.of( randomFrom( @@ -284,10 +273,12 @@ private static DiscoveryNode createNode(String group) { DiscoveryNodeRole.DATA_WARM_NODE_ROLE, DiscoveryNodeRole.DATA_COLD_NODE_ROLE ) - ) - ); + ); + } + + builder.add(DiscoveryNodeUtils.builder(randomAlphaOfLengthBetween(5, 15)).roles(roles).build()); } - return mock; + return builder.build(); } private static Metadata.Builder freezeMetadata(Metadata.Builder builder, Metadata metadata) {
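The ShardLimitValidatorTests change above swaps Mockito mocks of DiscoveryNode and DiscoveryNodes for real instances assembled through builders, which keeps the test honest as the node API evolves. A small sketch of the builder pattern as used here, with role choices picked arbitrarily for illustration:

    import org.elasticsearch.cluster.node.DiscoveryNodeRole;
    import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
    import org.elasticsearch.cluster.node.DiscoveryNodes;

    import java.util.Set;

    class DiscoveryNodesSketch {
        // Real node objects instead of mocks: ids via the test-framework builder,
        // roles stated explicitly per node.
        static DiscoveryNodes twoDataNodes() {
            return DiscoveryNodes.builder()
                .add(DiscoveryNodeUtils.builder("node-1").roles(Set.of(DiscoveryNodeRole.DATA_HOT_NODE_ROLE)).build())
                .add(DiscoveryNodeUtils.builder("node-2").roles(Set.of(DiscoveryNodeRole.DATA_FROZEN_NODE_ROLE)).build())
                .build();
        }
    }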
diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index 288c186e5f882..c4d6cb6be502d 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.index.IndexService.IndexCreationContext; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.Analysis; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.CharFilterFactory; @@ -191,7 +192,7 @@ public void testStandardFilterBWC() throws IOException { // caching bug meant that it was still possible to create indexes using a standard // filter until 7.6 { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_6_0, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_6_0, IndexVersion.current()); final Settings settings = Settings.builder() .put("index.analysis.analyzer.my_standard.tokenizer", "standard") .put("index.analysis.analyzer.my_standard.filter", "standard") @@ -202,7 +203,7 @@ public void testStandardFilterBWC() throws IOException { assertThat(exc.getMessage(), equalTo("The [standard] token filter has been removed.")); } { - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_5_2); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_5_2); final Settings settings = Settings.builder() .put("index.analysis.analyzer.my_standard.tokenizer", "standard") .put("index.analysis.analyzer.my_standard.filter", "standard") diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index c95e72d24bfaa..c81ea1579828f 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -72,7 +72,6 @@ import org.elasticsearch.cluster.service.ClusterStateTaskExecutorUtils; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.cluster.version.CompatibilityVersions; -import org.elasticsearch.cluster.version.CompatibilityVersionsUtils; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; @@ -409,9 +408,16 @@ public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNo new NodeJoinExecutor(allocationService, (s, p, r) -> {}), clusterState, List.of( - JoinTask.singleNode(discoveryNode, CompatibilityVersionsUtils.staticCurrent(), DUMMY_REASON, 
ActionListener.running(() -> { - throw new AssertionError("should not complete publication"); - }), clusterState.term()) + JoinTask.singleNode( + discoveryNode, + new CompatibilityVersions(transportVersion, Map.of()), + Set.of(), + DUMMY_REASON, + ActionListener.running(() -> { + throw new AssertionError("should not complete publication"); + }), + clusterState.term() + ) ) ); } @@ -427,6 +433,7 @@ public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List new JoinTask.NodeJoinTask( node, new CompatibilityVersions(transportVersion, Map.of()), + Set.of(), DUMMY_REASON, ActionListener.running(() -> { throw new AssertionError("should not complete publication"); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java index 998497f991ed5..e53019fd93506 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryStatusTests.java @@ -13,7 +13,7 @@ import org.apache.lucene.util.Version; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.store.StoreFileMetadata; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -23,7 +23,7 @@ import java.util.regex.Pattern; public class RecoveryStatusTests extends ESSingleNodeTestCase { - private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersion.MINIMUM_COMPATIBLE.luceneVersion(); + private static final Version MIN_SUPPORTED_LUCENE_VERSION = IndexVersions.MINIMUM_COMPATIBLE.luceneVersion(); public void testRenameTempFiles() throws IOException { IndexService service = createIndex("foo"); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java index 47d3777573c4f..e2e56d33bbba0 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/StartRecoveryRequestTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.UUIDs; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; @@ -42,8 +43,14 @@ public void testSerialization() throws Exception { final StartRecoveryRequest outRequest = new StartRecoveryRequest( new ShardId("test", "_na_", 0), UUIDs.randomBase64UUID(), - DiscoveryNodeUtils.builder("a").roles(emptySet()).version(targetNodeVersion, IndexVersion.ZERO, IndexVersion.current()).build(), - DiscoveryNodeUtils.builder("b").roles(emptySet()).version(targetNodeVersion, IndexVersion.ZERO, IndexVersion.current()).build(), + DiscoveryNodeUtils.builder("a") + .roles(emptySet()) + .version(targetNodeVersion, IndexVersions.ZERO, IndexVersion.current()) + .build(), + DiscoveryNodeUtils.builder("b") + .roles(emptySet()) + .version(targetNodeVersion, IndexVersions.ZERO, IndexVersion.current()) + .build(), randomNonNegativeLong(), metadataSnapshot, randomBoolean(), diff --git a/server/src/test/java/org/elasticsearch/node/NodeTests.java 
b/server/src/test/java/org/elasticsearch/node/NodeTests.java index 24bbca6ddf512..218073a1eb3f1 100644 --- a/server/src/test/java/org/elasticsearch/node/NodeTests.java +++ b/server/src/test/java/org/elasticsearch/node/NodeTests.java @@ -11,23 +11,17 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.bootstrap.BootstrapContext; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.core.RestApiVersion; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.PersistedClusterStateService; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; @@ -43,18 +37,14 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsServiceTests; import org.elasticsearch.plugins.RecoveryPlannerPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.MockHttpTransport; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.MediaType; import org.elasticsearch.xcontent.NamedObjectNotFoundException; @@ -77,7 +67,6 @@ import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Supplier; import static org.elasticsearch.test.NodeRoles.dataNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -442,22 +431,7 @@ protected void doClose() throws IOException { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { List<Object> components = new ArrayList<>(); components.add(new 
PluginComponentBinding<>(MyInterface.class, getRandomBool() ? new Foo() : new Bar())); return components; @@ -673,7 +647,7 @@ public void testPluggablePersistedClusterStateServiceValidation() throws IOExcep List.of(TestClusterCoordinationPlugin1.class, TestClusterCoordinationPlugin2.class, getTestTransportPlugin()) ) ).getMessage(), - containsString("multiple persisted-state-service factories found") + containsString("A single " + ClusterCoordinationPlugin.PersistedClusterStateServiceFactory.class.getName() + " was expected") ); try (Node node = new MockNode(baseSettings().build(), List.of(TestClusterCoordinationPlugin1.class, getTestTransportPlugin()))) { @@ -686,5 +660,4 @@ public void testPluggablePersistedClusterStateServiceValidation() throws IOExcep } } } - } diff --git a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java index cd9e23d80cb71..6ea93cc9c5940 100644 --- a/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java +++ b/server/src/test/java/org/elasticsearch/nodesinfo/NodeInfoStreamingTests.java @@ -22,7 +22,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.http.HttpInfo; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.ingest.IngestInfo; import org.elasticsearch.ingest.ProcessorInfo; import org.elasticsearch.monitor.jvm.JvmInfo; @@ -115,7 +115,7 @@ private static NodeInfo createNodeInfo() { Build build = Build.current(); DiscoveryNode node = DiscoveryNodeUtils.builder("test_node") .roles(emptySet()) - .version(VersionUtils.randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomVersion()) + .version(VersionUtils.randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomVersion()) .build(); Settings settings = randomBoolean() ? 
null : Settings.builder().put("test", "setting").build(); OsInfo osInfo = null; diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java index c5e974c4abcfd..5e80b6d217a55 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginIntrospectorTests.java @@ -8,38 +8,24 @@ package org.elasticsearch.plugins; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.allocator.ShardsAllocator; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.engine.EngineFactory; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.indices.recovery.plan.RecoveryPlannerService; import org.elasticsearch.indices.recovery.plan.ShardSnapshotsService; import org.elasticsearch.ingest.Processor; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.PrivilegedOperations; import org.elasticsearch.test.compiler.InMemoryJavaCompiler; import org.elasticsearch.test.jar.JarUtils; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.net.URL; import java.net.URLClassLoader; @@ -259,22 +245,7 @@ public final class FooPlugin extends q.AbstractFooPlugin { } public void testOverriddenMethodsBasic() { class FooPlugin extends Plugin { @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { return null; } }
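Both NodeTests and PluginIntrospectorTests above pick up the same API change: Plugin#createComponents no longer receives a dozen positional arguments but a single PluginServices accessor object, so adding a new service to the node no longer breaks every plugin override. A hedged sketch of a plugin written against the new shape (the component registered is purely illustrative):

    import org.elasticsearch.plugins.Plugin;

    import java.util.Collection;
    import java.util.List;

    class ComponentsPluginSketch extends Plugin {
        @Override
        public Collection<Object> createComponents(PluginServices services) {
            // Dependencies are now pulled off the services accessor on demand,
            // e.g. services.clusterService(), instead of arriving positionally.
            return List.of(new Object()); // components to register with the node
        }
    }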
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index f474b8d34a177..d5bc5ad236b54 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -39,7 +39,7 @@ import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; import org.elasticsearch.snapshots.SnapshotDeleteListener; import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.telemetry.metric.Meter; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; @@ -51,6 +51,7 @@ import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.concurrent.Executor; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isA; @@ -347,7 +348,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } @Override - public void getRepositoryData(ActionListener<RepositoryData> listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener<RepositoryData> listener) { listener.onResponse(null); } @@ -483,7 +484,7 @@ private MeteredRepositoryTypeA(RepositoryMetadata metadata, ClusterService clust mock(RecoverySettings.class), BlobPath.EMPTY, Map.of("bucket", "bucket-a"), - Meter.NOOP + MeterRegistry.NOOP ); } @@ -511,7 +512,7 @@ private MeteredRepositoryTypeB(RepositoryMetadata metadata, ClusterService clust mock(RecoverySettings.class), BlobPath.EMPTY, Map.of("bucket", "bucket-b"), - Meter.NOOP + MeterRegistry.NOOP ); }
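The Repository#getRepositoryData signature change above threads an explicit Executor through the call: callers now state where any expensive response handling may fork, and a caller that wants no forking at all can pass a direct executor, as the tests below do. A sketch of the call shape under that assumption:

    import org.elasticsearch.action.ActionListener;
    import org.elasticsearch.common.util.concurrent.EsExecutors;
    import org.elasticsearch.repositories.Repository;
    import org.elasticsearch.repositories.RepositoryData;

    class GetRepositoryDataSketch {
        // Run the listener on the calling (or completing) thread; no fork.
        static void fetch(Repository repo) {
            repo.getRepositoryData(
                EsExecutors.DIRECT_EXECUTOR_SERVICE,
                ActionListener.wrap(
                    repositoryData -> { /* use the RepositoryData */ },
                    e -> { /* handle failure */ }
                )
            );
        }
    }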
diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index bb4d4dac31a5a..44aff1850a869 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESTestCase; @@ -108,7 +109,7 @@ public void testAddSnapshots() { newSnapshot, new RepositoryData.SnapshotDetails( randomFrom(SnapshotState.SUCCESS, SnapshotState.PARTIAL, SnapshotState.FAILED), - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(10) @@ -140,7 +141,7 @@ public void testInitIndices() { snapshotId.getUUID(), new RepositoryData.SnapshotDetails( randomFrom(SnapshotState.values()), - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(10) @@ -208,7 +209,7 @@ public void testGetSnapshotState() { snapshotId, new RepositoryData.SnapshotDetails( state, - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(10) @@ -453,7 +454,7 @@ public static RepositoryData generateRandomRepoData() { snapshotId, new RepositoryData.SnapshotDetails( randomFrom(SnapshotState.values()), - randomFrom(IndexVersion.current(), IndexVersion.MINIMUM_COMPATIBLE), + randomFrom(IndexVersion.current(), IndexVersions.MINIMUM_COMPATIBLE), randomNonNegativeLong(), randomNonNegativeLong(), randomAlphaOfLength(10) diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java index a5fbc6bf4d89e..907eedbfa7bf6 100644 --- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -41,6 +42,7 @@ import org.elasticsearch.repositories.ShardGenerations; import org.elasticsearch.repositories.SnapshotShardContext; import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotState; import org.elasticsearch.test.ESIntegTestCase; @@ -50,12 +52,14 @@ import java.io.IOException; import java.nio.file.Path; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; @@ -290,7 +294,7 @@ public void testRepositoryDataDetails() throws Exception { ); }; - final RepositoryData repositoryData = PlainActionFuture.get(repository::getRepositoryData); + final RepositoryData repositoryData = AbstractSnapshotIntegTestCase.getRepositoryData(repository); final RepositoryData.SnapshotDetails snapshotDetails = repositoryData.getSnapshotDetails(snapshotId); snapshotDetailsAsserter.accept(snapshotDetails); @@ -308,7 +312,7 @@ public void testRepositoryDataDetails() throws Exception { repositoryData.getGenId() ); - snapshotDetailsAsserter.accept(PlainActionFuture.get(repository::getRepositoryData).getSnapshotDetails(snapshotId)); + snapshotDetailsAsserter.accept(AbstractSnapshotIntegTestCase.getRepositoryData(repository).getSnapshotDetails(snapshotId)); } private static void writeIndexGen(BlobStoreRepository repository, RepositoryData repositoryData, long generation) throws Exception { @@ -434,7 +438,7 @@ public void testGetRepositoryDataThreadContext() { threadContext.putHeader(headerName, headerValue); threadPool.generic().execute(ActionRunnable.wrap(listeners.acquire(), l -> { safeAwait(barrier); - repo.getRepositoryData(l.map(repositoryData -> { + repo.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l.map(repositoryData -> { assertEquals(headerValue, threadContext.getHeader(headerName)); return null; })); @@ -445,6 +449,42 @@ public void testGetRepositoryDataThreadContext() { future.actionGet(10, TimeUnit.SECONDS); } + public void testGetRepositoryDataForking() { + final var forkedListeners = Collections.synchronizedList(new ArrayList<Runnable>()); + final var future = new PlainActionFuture<Void>(); + try (var listeners = new RefCountingListener(future)) { + final var repo = setupRepo(); + final int threads = between(1, 5); + final var barrier = new 
CyclicBarrier(threads); + final var threadPool = client().threadPool(); + final var testThread = Thread.currentThread(); + final var resultsCountDown = new CountDownLatch(threads); + for (int i = 0; i < threads; i++) { + threadPool.generic().execute(ActionRunnable.wrap(listeners.acquire(), l -> { + final var callingThread = Thread.currentThread(); + safeAwait(barrier); + repo.getRepositoryData(runnable -> { + forkedListeners.add(runnable); + resultsCountDown.countDown(); + }, l.map(repositoryData -> { + final var currentThread = Thread.currentThread(); + if (currentThread == testThread) { + assertEquals(0, resultsCountDown.getCount()); + } else { + assertSame(callingThread, currentThread); + resultsCountDown.countDown(); + } + return null; + })); + })); + } + safeAwait(resultsCountDown); + forkedListeners.forEach(Runnable::run); + repo.getRepositoryData(runnable -> fail("should use cached value and not fork"), listeners.acquire(ignored -> {})); + } + future.actionGet(10, TimeUnit.SECONDS); + } + private Environment createEnvironment() { Path home = createTempDir(); return TestEnvironment.newEnvironment( diff --git a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java index e57b5039eb119..df9f4384719e3 100644 --- a/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/script/VectorScoreScriptUtilsTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.script; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.BinaryDenseVectorScriptDocValuesTests; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.KnnDenseVectorScriptDocValuesTests; @@ -42,11 +43,11 @@ public void testFloatVectorClassBindings() throws IOException { List<DenseVectorDocValuesField> fields = List.of( new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.V_7_4_0), + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), "test", ElementType.FLOAT, dims, - IndexVersion.V_7_4_0 + IndexVersions.V_7_4_0 ), new BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), @@ -210,11 +211,11 @@ public void testByteVsFloatSimilarity() throws IOException { List<DenseVectorDocValuesField> fields = List.of( new BinaryDenseVectorDocValuesField( - BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.V_7_4_0), + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersions.V_7_4_0), "field0", ElementType.FLOAT, dims, - IndexVersion.V_7_4_0 + IndexVersions.V_7_4_0 ), new BinaryDenseVectorDocValuesField( BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, ElementType.FLOAT, IndexVersion.current()), diff --git a/server/src/test/java/org/elasticsearch/script/field/IPAddressTests.java b/server/src/test/java/org/elasticsearch/script/field/IPAddressTests.java index f6e6660151311..63e5f9f072e98 100644 --- a/server/src/test/java/org/elasticsearch/script/field/IPAddressTests.java +++ b/server/src/test/java/org/elasticsearch/script/field/IPAddressTests.java @@ -8,8 +8,16 @@ package org.elasticsearch.script.field; +import 
org.elasticsearch.common.io.stream.GenericNamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + public class IPAddressTests extends ESTestCase { public void testToString() { @@ -30,4 +38,14 @@ public void testV6() { assertFalse(addr4.isV4()); assertTrue(addr4.isV6()); } + + public void testWriteable() throws IOException { + var registry = new NamedWriteableRegistry( + List.of(new Entry(GenericNamedWriteable.class, IPAddress.NAMED_WRITEABLE_NAME, IPAddress::new)) + ); + var original = new IPAddress("192.168.1.1"); + var generic = copyNamedWriteable(original, registry, GenericNamedWriteable.class); + var copied = asInstanceOf(IPAddress.class, generic); + assertThat(copied.toString(), equalTo(original.toString())); + } } diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java index a21f3d3c7e0da..2be338efd7174 100644 --- a/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/DenseVectorTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.vectors.BinaryDenseVectorScriptDocValuesTests; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.test.ESTestCase; @@ -67,7 +68,7 @@ public void testFloatVsListQueryVector() { assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); - for (IndexVersion indexVersion : List.of(IndexVersion.V_7_4_0, IndexVersion.current())) { + for (IndexVersion indexVersion : List.of(IndexVersions.V_7_4_0, IndexVersion.current())) { BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, ElementType.FLOAT, indexVersion); BinaryDenseVector bdv = new BinaryDenseVector(docVector, value, dims, indexVersion); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 80eb0c4ac6bbb..f5912872e004a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -18,7 +18,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.is; @@ -78,12 +78,12 @@ protected void indexData() throws Exception { indexRandom(true, docs); - SearchResponse resp = client().prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()).get(); - assertSearchResponse(resp); + SearchResponse resp = 
prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()).get(); + assertNoFailures(resp); long totalOnOne = resp.getHits().getTotalHits().value; assertThat(totalOnOne, is(15L)); - resp = client().prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()).get(); - assertSearchResponse(resp); + resp = prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()).get(); + assertNoFailures(resp); long totalOnTwo = resp.getHits().getTotalHits().value; assertThat(totalOnTwo, is(12L)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java index daf97a2a49e23..7f6bedcea5277 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregatorTests.java @@ -30,7 +30,7 @@ protected Point randomPoint() { } @Override - protected GeoBoundingBox randomBBox() { + protected GeoBoundingBox randomBBox(int precision) { Rectangle rectangle = GeometryTestUtils.randomRectangle(); return new GeoBoundingBox( new GeoPoint(rectangle.getMaxLat(), rectangle.getMinLon()), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java index 6a34eb104fd19..2cf729992cfb4 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileGridAggregatorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.bucket.geogrid; -import org.apache.lucene.geo.GeoEncodingUtils; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -16,6 +15,11 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Rectangle; +import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude; +import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude; +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude; +import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude; + public class GeoTileGridAggregatorTests extends GeoGridAggregatorTestCase { @Override @@ -37,7 +41,7 @@ protected Point randomPoint() { } @Override - protected GeoBoundingBox randomBBox() { + protected GeoBoundingBox randomBBox(int precision) { GeoBoundingBox bbox = randomValueOtherThanMany( (b) -> b.top() > GeoTileUtils.LATITUDE_MASK || b.bottom() < -GeoTileUtils.LATITUDE_MASK, () -> { @@ -48,11 +52,42 @@ protected GeoBoundingBox randomBBox() { ); } ); - // Avoid numerical errors for sub-atomic values - double left = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(bbox.left())); - double right = GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(bbox.right())); - double top = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(bbox.top())); - double bottom = GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(bbox.bottom())); + final int tiles = 1 << precision; + + // GeoTileBoundedPredicate adjusts the given bounding box when it touches tile boundaries, so we need to + // adjust here in order not 
to generate bounding boxes that touch tiles, or the test will fail + + // compute tile at the top left + final Rectangle minTile = GeoTileUtils.toBoundingBox( + GeoTileUtils.getXTile(bbox.left(), tiles), + GeoTileUtils.getYTile(bbox.top(), tiles), + precision + ); + // adjust if it is touching the tile + final int encodedLeft = encodeLongitude(bbox.left()); + final double left = encodeLongitude(minTile.getMaxX()) == encodedLeft + ? decodeLongitude(encodedLeft + 1) + : decodeLongitude(encodedLeft); + final int encodedTop = encodeLatitude(bbox.top()); + final double bottom = encodeLatitude(minTile.getMinY()) == encodedTop + ? decodeLatitude(encodedTop + 1) + : decodeLatitude(encodedTop); + // compute tile at the bottom right + final Rectangle maxTile = GeoTileUtils.toBoundingBox( + GeoTileUtils.getXTile(bbox.right(), tiles), + GeoTileUtils.getYTile(bbox.bottom(), tiles), + precision + ); + // adjust if it is touching the tile + final int encodedRight = encodeLongitude(bbox.right()); + final double right = encodeLongitude(maxTile.getMinX()) == encodedRight + ? decodeLongitude(encodedRight) + : decodeLongitude(encodedRight + 1); + final int encodedBottom = encodeLatitude(bbox.bottom()); + final double top = encodeLatitude(maxTile.getMaxY()) == encodedBottom + ? decodeLatitude(encodedBottom) + : decodeLatitude(encodedBottom + 1); + bbox.topLeft().reset(top, left); bbox.bottomRight().reset(bottom, right); return bbox; @@ -60,14 +95,6 @@ protected GeoBoundingBox randomBBox() { @Override protected Rectangle getTile(double lng, double lat, int precision) { - int tiles = 1 << precision; - int x = GeoTileUtils.getXTile(lng, tiles); - int y = GeoTileUtils.getYTile(lat, tiles); - Rectangle r1 = GeoTileUtils.toBoundingBox(x, y, precision); - Rectangle r2 = GeoTileUtils.toBoundingBox(GeoTileUtils.longEncode(lng, lat, precision)); - if (r1.equals(r2) == false) { - int a = 0; - } return GeoTileUtils.toBoundingBox(GeoTileUtils.longEncode(lng, lat, precision)); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java index e6de1e3aa5db8..7114ce46d3b79 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoTileUtilsTests.java @@ -256,12 +256,12 @@ public void testEncodingLuceneLonConsistency() { Matchers.anyOf(equalTo(x + 1), equalTo(tiles - 1)) ); // next encoded value down belongs to the tile - assertThat(GeoTileUtils.getXTile(quantizeLonDown(rectangle.getMaxX()), tiles), equalTo(x)); + assertThat(GeoTileUtils.getXTile(GeoUtils.quantizeLonDown(rectangle.getMaxX()), tiles), equalTo(x)); // min longitude belongs to the tile assertThat(GeoTileUtils.getXTile(GeoUtils.quantizeLon(rectangle.getMinX()), tiles), equalTo(x)); if (x != 0) { // next encoded value down belongs to the previous tile - assertThat(GeoTileUtils.getXTile(quantizeLonDown(rectangle.getMinX()), tiles), equalTo(x - 1)); + assertThat(GeoTileUtils.getXTile(GeoUtils.quantizeLonDown(rectangle.getMinX()), tiles), equalTo(x - 1)); } } } @@ -276,7 +276,7 @@ public void testEncodingLuceneLatConsistency() { assertThat(GeoTileUtils.getYTile(GeoUtils.quantizeLat(rectangle.getMaxLat()), tiles), equalTo(y)); if (y != 0) { // next encoded value up belongs to the previous tile - assertThat(GeoTileUtils.getYTile(quantizeLatUp(rectangle.getMaxLat()), tiles), equalTo(y - 1)); + 
assertThat(GeoTileUtils.getYTile(GeoUtils.quantizeLatUp(rectangle.getMaxLat()), tiles), equalTo(y - 1)); } // min latitude belongs to the next tile except the last one assertThat( @@ -284,15 +284,8 @@ public void testEncodingLuceneLatConsistency() { Matchers.anyOf(equalTo(y + 1), equalTo(tiles - 1)) ); // next encoded value up belongs to the tile - assertThat(GeoTileUtils.getYTile(quantizeLatUp(rectangle.getMinLat()), tiles), equalTo(y)); + assertThat(GeoTileUtils.getYTile(GeoUtils.quantizeLatUp(rectangle.getMinLat()), tiles), equalTo(y)); } } - private static double quantizeLonDown(double lon) { - return GeoEncodingUtils.decodeLongitude(GeoEncodingUtils.encodeLongitude(lon) - 1); - } - - private static double quantizeLatUp(double lat) { - return GeoEncodingUtils.decodeLatitude(GeoEncodingUtils.encodeLatitude(lat) + 1); - } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java index 0c72abfec48e0..190ab9914d933 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java @@ -24,7 +24,7 @@ public class InternalMedianAbsoluteDeviationTests extends InternalAggregationTes @Override protected InternalMedianAbsoluteDeviation createTestInstance(String name, Map metadata) { - final TDigestState valuesSketch = TDigestState.create(randomDoubleBetween(20, 1000, true)); + final TDigestState valuesSketch = TDigestState.create(randomFrom(50.0, 100.0, 200.0, 500.0, 1000.0)); final int numberOfValues = frequently() ? 
randomIntBetween(0, 1000) : 0; for (int i = 0; i < numberOfValues; i++) { valuesSketch.add(randomDouble()); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 3f7d67d292761..9e6b6330d2f23 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -70,6 +70,7 @@ import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.lucene.util.CombinedBitSet; +import org.elasticsearch.lucene.util.MatchAllBitSet; import org.elasticsearch.search.aggregations.BucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.test.ESTestCase; @@ -96,6 +97,7 @@ import static org.elasticsearch.search.internal.ExitableDirectoryReader.ExitableLeafReader; import static org.elasticsearch.search.internal.ExitableDirectoryReader.ExitablePointValues; import static org.elasticsearch.search.internal.ExitableDirectoryReader.ExitableTerms; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; @@ -279,7 +281,6 @@ public void testContextIndexSearcherSparseWithDeletions() throws IOException { doTestContextIndexSearcher(true, true); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/94615") public void testContextIndexSearcherDenseWithDeletions() throws IOException { doTestContextIndexSearcher(false, true); } @@ -332,7 +333,7 @@ public void onRemoval(ShardId shardId, Accountable accountable) { if (sparse) { assertThat(bitSet, instanceOf(SparseFixedBitSet.class)); } else { - assertThat(bitSet, instanceOf(FixedBitSet.class)); + assertThat(bitSet, anyOf(instanceOf(FixedBitSet.class), instanceOf(MatchAllBitSet.class))); } DocumentSubsetDirectoryReader filteredReader = new DocumentSubsetDirectoryReader(reader, cache, roleQuery); @@ -564,7 +565,7 @@ public void testCancelSliceTasksOnException() throws Exception { 1 ) ) { - leafSlices = contextIndexSearcher.getSlicesForCollection(); + leafSlices = contextIndexSearcher.getSlices(); int numThrowingLeafSlices = randomIntBetween(1, 3); for (int i = 0; i < numThrowingLeafSlices; i++) { LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; @@ -700,7 +701,7 @@ public void testCancelSliceTasksOnTimeout() throws Exception { 1 ) ) { - leafSlices = contextIndexSearcher.getSlicesForCollection(); + leafSlices = contextIndexSearcher.getSlices(); int numThrowingLeafSlices = randomIntBetween(1, 3); for (int i = 0; i < numThrowingLeafSlices; i++) { LeafSlice throwingLeafSlice = leafSlices[randomIntBetween(0, Math.min(leafSlices.length, numAvailableThreads) - 1)]; diff --git a/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java index fee89d54d7946..e7e6aae195bec 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java @@ -147,7 +147,7 @@ public void testRefreshRepositoryUuidsRefreshesAsNeeded() { when(freshBlobStoreRepo.getMetadata()).thenReturn( new RepositoryMetadata(repositoryName, 
randomAlphaOfLength(3), Settings.EMPTY).withUuid(UUIDs.randomBase64UUID()) ); - doThrow(new AssertionError("repo UUID already known")).when(freshBlobStoreRepo).getRepositoryData(any()); + doThrow(new AssertionError("repo UUID already known")).when(freshBlobStoreRepo).getRepositoryData(any(), any()); } case 3 -> { final Repository staleBlobStoreRepo = mock(BlobStoreRepository.class); @@ -158,16 +158,14 @@ public void testRefreshRepositoryUuidsRefreshesAsNeeded() { ); doAnswer(invocationOnMock -> { assertTrue(pendingRefreshes.remove(repositoryName)); - @SuppressWarnings("unchecked") - ActionListener repositoryDataListener = (ActionListener) invocationOnMock - .getArguments()[0]; + final ActionListener repositoryDataListener = invocationOnMock.getArgument(1); if (randomBoolean()) { repositoryDataListener.onResponse(null); } else { repositoryDataListener.onFailure(new Exception("simulated")); } return null; - }).when(staleBlobStoreRepo).getRepositoryData(any()); + }).when(staleBlobStoreRepo).getRepositoryData(any(), any()); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index 1b5ff3f39be22..07acfa96a6a93 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1223,7 +1223,7 @@ public void testRunConcurrentSnapshots() { private RepositoryData getRepositoryData(Repository repository) { final PlainActionFuture res = PlainActionFuture.newFuture(); - repository.getRepositoryData(res); + repository.getRepositoryData(deterministicTaskQueue::scheduleNow, res); deterministicTaskQueue.runAllRunnableTasks(); assertTrue(res.isDone()); return res.actionGet(); @@ -2203,7 +2203,8 @@ public void start(ClusterState initialState) { new Reconfigurator(clusterService.getSettings(), clusterService.getClusterSettings()), LeaderHeartbeatService.NO_OP, StatefulPreVoteCollector::new, - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); masterService.setClusterStatePublisher(coordinator); coordinator.start(); diff --git a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java index 1ec8a48b5d168..57fe0bfe3a9e3 100644 --- a/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java +++ b/server/src/test/java/org/elasticsearch/test/search/aggregations/bucket/SharedSignificantTermsTestMethods.java @@ -27,8 +27,9 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.significantTerms; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; import static org.elasticsearch.test.ESIntegTestCase.client; +import static org.elasticsearch.test.ESIntegTestCase.prepareSearch; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; public class SharedSignificantTermsTestMethods { @@ -47,11 +48,10 @@ public static void aggregateAndCheckFromSeveralShards(ESIntegTestCase testCase) } private static void 
checkSignificantTermsAggregationCorrect(ESIntegTestCase testCase) { - SearchResponse response = client().prepareSearch(INDEX_NAME) - .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD))) - .execute() - .actionGet(); - assertSearchResponse(response); + SearchResponse response = prepareSearch(INDEX_NAME).addAggregation( + terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)) + ).execute().actionGet(); + assertNoFailures(response); StringTerms classes = response.getAggregations().get("class"); Assert.assertThat(classes.getBuckets().size(), equalTo(2)); for (Terms.Bucket classBucket : classes.getBuckets()) { diff --git a/server/src/test/java/org/elasticsearch/transport/ClusterConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/ClusterConnectionManagerTests.java index d16b1f304939f..712f44bfa1115 100644 --- a/server/src/test/java/org/elasticsearch/transport/ClusterConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ClusterConnectionManagerTests.java @@ -510,7 +510,7 @@ public void onFailure(Exception e) { connectionPermits.release(); runAgain(); } else { - throw new AssertionError("unexpected", e); + fail(e); } } @@ -592,7 +592,7 @@ public void onFailure(Exception e) { && e.getMessage().contains("concurrently connecting and disconnecting")) { runAgain(); } else { - throw new AssertionError("unexpected", e); + fail(e); } } diff --git a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java index 965288a989870..49e0ab1653432 100644 --- a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.junit.annotations.TestLogging; @@ -237,7 +238,7 @@ public void testProxyStrategyWillOpenNewConnectionsOnDisconnect() throws Excepti public void testConnectFailsWithIncompatibleNodes() { VersionInformation incompatibleVersion = new VersionInformation( Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java index 47d75a78d65d4..3e05743741f73 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterClientTests.java @@ -52,7 +52,6 @@ public void tearDown() throws Exception { ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97080") public void testConnectAndExecuteRequest() throws Exception { Settings remoteSettings = Settings.builder() .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), "foo_bar_cluster") diff --git 
a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java index 3f67c0c65d559..31096a53e67c0 100644 --- a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.test.VersionUtils; @@ -374,7 +375,7 @@ public void testDiscoverWithSingleIncompatibleSeedNode() { List knownNodes = new CopyOnWriteArrayList<>(); VersionInformation incompatibleVersion = new VersionInformation( Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); @@ -453,7 +454,7 @@ public void testConnectFailsWithIncompatibleNodes() { List knownNodes = new CopyOnWriteArrayList<>(); VersionInformation incompatibleVersion = new VersionInformation( Version.CURRENT.minimumCompatibilityVersion().minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); TransportVersion incompatibleTransportVersion = TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE); @@ -1086,7 +1087,7 @@ public void testGetNodePredicateNodeVersion() { Version version = VersionUtils.randomVersion(random()); DiscoveryNode node = DiscoveryNodeUtils.builder("id") .address(address) - .version(version, IndexVersion.ZERO, IndexVersion.current()) + .version(version, IndexVersions.ZERO, IndexVersion.current()) .build(); assertThat(nodePredicate.test(node), equalTo(Version.CURRENT.isCompatible(version))); } diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 32a5b5dec9597..fb0eb314a1e33 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.RefCounted; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancellationService; @@ -53,7 +54,7 @@ public class TransportActionProxyTests extends ESTestCase { private static final Version CURRENT_VERSION = Version.fromString(String.valueOf(Version.CURRENT.major) + ".0.0"); protected static final VersionInformation version0 = new VersionInformation( CURRENT_VERSION.minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); protected static final TransportVersion transportVersion0 = TransportVersions.MINIMUM_COMPATIBLE; @@ -63,7 +64,7 @@ public class TransportActionProxyTests extends ESTestCase { protected static final VersionInformation version1 = new VersionInformation( 
Version.fromId(CURRENT_VERSION.id + 1), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ); protected static final TransportVersion transportVersion1 = TransportVersion.fromId(TransportVersion.current().id() + 1); diff --git a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index fe77b161a9223..25ce8254a59c2 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; @@ -129,7 +130,7 @@ public void testConnectToNodeLight() { TransportVersionUtils.randomCompatibleVersion(random()), new VersionInformation( VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ), TransportService.NOOP_TRANSPORT_INTERCEPTOR @@ -137,7 +138,7 @@ public void testConnectToNodeLight() { DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("") .address(transportServiceB.getLocalNode().getAddress()) .roles(emptySet()) - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); try ( Transport.Connection connection = AbstractSimpleTransportTestCase.openConnection( @@ -174,7 +175,7 @@ public void testMismatchedClusterName() { DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("") .address(transportServiceB.getLocalNode().getAddress()) .roles(emptySet()) - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try ( @@ -209,7 +210,7 @@ public void testIncompatibleNodeVersions() { TransportVersions.MINIMUM_COMPATIBLE, new VersionInformation( VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ), TransportService.NOOP_TRANSPORT_INTERCEPTOR @@ -217,7 +218,7 @@ public void testIncompatibleNodeVersions() { DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("") .address(transportServiceB.getLocalNode().getAddress()) .roles(emptySet()) - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try ( @@ -258,13 +259,13 @@ public void testIncompatibleTransportVersions() { "TS_B", settings, TransportVersionUtils.getPreviousVersion(TransportVersions.MINIMUM_COMPATIBLE), - new 
VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()), + new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()), TransportService.NOOP_TRANSPORT_INTERCEPTOR ); DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("") .address(transportServiceB.getLocalNode().getAddress()) .roles(emptySet()) - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); expectThrows(ConnectTransportException.class, () -> { try ( @@ -336,7 +337,7 @@ public void testRejectsMismatchedBuildHash() { final DiscoveryNode discoveryNode = DiscoveryNodeUtils.builder("") .address(transportServiceB.getLocalNode().getAddress()) .roles(emptySet()) - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build(); TransportSerializationException ex = expectThrows(TransportSerializationException.class, () -> { try ( @@ -407,7 +408,7 @@ public void testAcceptsMismatchedBuildHashFromDifferentVersion() { "TS_B", Settings.builder().put("cluster.name", "a").build(), TransportVersions.MINIMUM_COMPATIBLE, - new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()), + new VersionInformation(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()), transportInterceptorB ); AbstractSimpleTransportTestCase.connectToNode(transportServiceA, transportServiceB.getLocalNode(), TestProfiles.LIGHT_PROFILE); diff --git a/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java b/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java index 784730efff999..d9c239905f343 100644 --- a/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java +++ b/test/external-modules/seek-tracking-directory/src/internalClusterTest/java/org/elasticsearch/test/seektracker/SeekTrackerPluginIT.java @@ -45,7 +45,7 @@ public void testSeekTrackerPlugin() throws InterruptedException { } indexRandom(true, docs); - client().prepareSearch("index").setQuery(QueryBuilders.termQuery("field", "term2")).get(); + prepareSearch("index").setQuery(QueryBuilders.termQuery("field", "term2")).get(); SeekStatsResponse response = client().execute(SeekTrackerPlugin.SEEK_STATS_ACTION, new SeekStatsRequest("index")).actionGet(); List shardSeekStats = response.getSeekStats().get("index"); diff --git a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java index de2ac43f7fb51..aa9ff52b00824 100644 --- a/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java +++ b/test/external-modules/seek-tracking-directory/src/main/java/org/elasticsearch/test/seektracker/SeekTrackerPlugin.java @@ -11,31 +11,18 @@ import 
org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.Collections; @@ -66,22 +53,7 @@ public List> getSettings() { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { return Collections.singletonList(seekStatsService); } diff --git a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java index 6e65ec2749010..09dfdc991b82c 100644 --- a/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java +++ b/test/fixtures/s3-fixture/src/main/java/fixture/s3/S3HttpHandler.java @@ -102,8 +102,10 @@ public void handle(final HttpExchange exchange) throws IOException { uploadsList.append("10000"); uploadsList.append("false"); - for (MultipartUpload value : uploads.values()) { - value.appendXml(uploadsList); + for (final var multipartUpload : uploads.values()) { + if (multipartUpload.getPath().startsWith(prefix)) { + multipartUpload.appendXml(uploadsList); + } } uploadsList.append(""); diff --git a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java index d36265dfec227..5227c333e5659 100644 --- a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java +++ b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java @@ -294,7 +294,7 @@ private static TestHttpResponse handleRequest(S3HttpHandler handler, String meth try { handler.handle(httpExchange); } catch (IOException e) { 
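+ // the fixture's handler is not expected to throw here; fail(e) fails the test while preserving the original exception as the cause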
- throw new AssertionError("unexpected", e); + fail(e); } assertNotEquals(0, httpExchange.getResponseCode()); return new TestHttpResponse( diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java index c82c635453edc..19eb072cb0c3f 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/DiskUsageIntegTestCase.java @@ -83,7 +83,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { try { Files.createDirectories(dataPath); } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } fileSystemProvider.addTrackedPath(dataPath); return Settings.builder() diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java index 9ae8b2dfbe0c6..138ab77035b43 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java @@ -40,6 +40,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.gateway.GatewayAllocator; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.elasticsearch.snapshots.SnapshotsInfoService; import org.elasticsearch.test.ClusterServiceUtils; @@ -215,7 +216,7 @@ protected static DiscoveryNode newNode(String nodeId, Version version) { protected static DiscoveryNode newNode(String nodeId, Version version, IndexVersion indexVersion) { return DiscoveryNodeUtils.builder(nodeId) .roles(MASTER_DATA_ROLES) - .version(version, IndexVersion.MINIMUM_COMPATIBLE, indexVersion) + .version(version, IndexVersions.MINIMUM_COMPATIBLE, indexVersion) .build(); } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java index 0b19f1aa422f3..3d90a253e48c6 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/AbstractCoordinatorTestCase.java @@ -31,6 +31,7 @@ import org.elasticsearch.cluster.coordination.AbstractCoordinatorTestCase.Cluster.ClusterNode; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.LinearizabilityChecker.History; +import org.elasticsearch.cluster.coordination.LinearizabilityChecker.LinearizabilityCheckAborted; import org.elasticsearch.cluster.coordination.LinearizabilityChecker.SequentialSpec; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -75,7 +76,6 @@ import org.elasticsearch.monitor.NodeHealthService; import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesRefRecycler; import org.elasticsearch.transport.DisruptableMockTransport; @@ -103,9 +103,6 @@ import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; -import 
java.util.concurrent.ScheduledThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BiConsumer; import java.util.function.BiFunction; @@ -747,22 +744,12 @@ private void stabilise(long stabilisationDurationMillis, boolean expectIdleJoinV ); logger.info("checking linearizability of history with size {}: {}", history.size(), history); - final AtomicBoolean abort = new AtomicBoolean(); - // Large histories can be problematic and have the linearizability checker run OOM - // Bound the time how long the checker can run on such histories (Values empirically determined) - final ScheduledThreadPoolExecutor scheduler = Scheduler.initScheduler(Settings.EMPTY, "test-scheduler"); try { - if (history.size() > 300) { - scheduler.schedule(() -> abort.set(true), 10, TimeUnit.SECONDS); - } - final boolean linearizable = LinearizabilityChecker.isLinearizable(spec, history, i -> null, abort::get); - if (abort.get() == false) { - assertTrue("history not linearizable: " + history, linearizable); - } - } finally { - ThreadPool.terminate(scheduler, 1, TimeUnit.SECONDS); + final boolean linearizable = LinearizabilityChecker.isLinearizable(spec, history, i -> null); + assertTrue("history is not linearizable: " + history, linearizable); + } catch (LinearizabilityCheckAborted e) { + logger.warn("linearizability check was aborted", e); } - logger.info("linearizability check completed"); } void bootstrapIfNecessary() { @@ -1165,7 +1152,8 @@ public RecyclerBytesStreamOutput newNetworkBytesStream() { coordinationServices.getReconfigurator(), coordinationServices.getLeaderHeartbeatService(), coordinationServices.getPreVoteCollectorFactory(), - CompatibilityVersionsUtils.staticCurrent() + CompatibilityVersionsUtils.staticCurrent(), + Set.of() ); coordinationDiagnosticsService = new CoordinationDiagnosticsService( clusterService, @@ -2254,7 +2242,7 @@ public void close() { try { delegate.close(); } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/LinearizabilityChecker.java b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/LinearizabilityChecker.java index ceca88ef7573e..223b0dc5a546b 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/coordination/LinearizabilityChecker.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/coordination/LinearizabilityChecker.java @@ -11,8 +11,11 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Tuple; +import org.elasticsearch.threadpool.Scheduler; +import org.elasticsearch.threadpool.ThreadPool; import java.util.ArrayList; import java.util.Collection; @@ -27,6 +30,9 @@ import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentLinkedQueue; +import java.util.concurrent.ScheduledThreadPoolExecutor; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.BooleanSupplier; import java.util.function.Consumer; @@ -207,15 +213,10 @@ public String toString() { } /** - * Checks whether the provided history is linearizable with respect to the given sequential 
specification - * - * @param spec the sequential specification of the datatype - * @param history the history of events to check for linearizability - * @param missingResponseGenerator used to complete the history with missing responses - * @return true iff the history is linearizable w.r.t. the given spec + * Convenience method for {@link #isLinearizable(SequentialSpec, History, Function)} that requires the history to be complete */ - public static boolean isLinearizable(SequentialSpec spec, History history, Function missingResponseGenerator) { - return isLinearizable(spec, history, missingResponseGenerator, () -> false); + public static boolean isLinearizable(SequentialSpec spec, History history) throws LinearizabilityCheckAborted { + return isLinearizable(spec, history, o -> { throw new AssertionError("history is not complete"); }); } /** @@ -224,21 +225,37 @@ public static boolean isLinearizable(SequentialSpec spec, History history, Funct * @param spec the sequential specification of the datatype * @param history the history of events to check for linearizability * @param missingResponseGenerator used to complete the history with missing responses - * @param terminateEarly a condition upon which to terminate early * @return true iff the history is linearizable w.r.t. the given spec */ - public static boolean isLinearizable( - SequentialSpec spec, - History history, - Function missingResponseGenerator, - BooleanSupplier terminateEarly - ) { - history = history.clone(); // clone history before completing it - history.complete(missingResponseGenerator); // complete history - final Collection> partitions = spec.partition(history.copyEvents()); - return partitions.stream().allMatch(h -> isLinearizable(spec, h, terminateEarly)); + public static boolean isLinearizable(SequentialSpec spec, History history, Function missingResponseGenerator) + throws LinearizabilityCheckAborted { + final ScheduledThreadPoolExecutor scheduler = Scheduler.initScheduler(Settings.EMPTY, "test-scheduler"); + final AtomicBoolean abort = new AtomicBoolean(); + try { + history = history.clone(); // clone history before completing it + history.complete(missingResponseGenerator); // complete history + final Collection> partitions = spec.partition(history.copyEvents()); + // Large histories can be problematic and can make the linearizability checker run out of memory. + // Bound how long the checker may run on such histories (values determined empirically). + if (history.size() > 300 || partitions.stream().anyMatch(p -> p.size() > 25)) { + logger.warn("Detected large history or partition for linearizability check. Limiting execution time");
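+ // once the deadline fires, the abort flag makes each remaining per-partition check below terminate early via abort::get, and the overall check is reported as aborted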
+ scheduler.schedule(() -> abort.set(true), 10, TimeUnit.SECONDS); + } + var allLinearizable = partitions.stream().allMatch(h -> isLinearizable(spec, h, abort::get)); + if (abort.get()) { + throw new LinearizabilityCheckAborted(); + } + return allLinearizable; + } finally { + ThreadPool.terminate(scheduler, 1, TimeUnit.SECONDS); + } } + /** + * This exception is thrown if the check could not be completed due to a timeout or OOM (either of which can be caused by a long event history) + */ + public static final class LinearizabilityCheckAborted extends Exception {} + private static boolean isLinearizable(SequentialSpec spec, List history, BooleanSupplier terminateEarly) { logger.debug("Checking history of size: {}: {}", history.size(), history); Object state = spec.initialState(); // the current state of the datatype @@ -287,13 +304,6 @@ private static boolean isLinearizable(SequentialSpec spec, List history, return true; } - /** - * Convenience method for {@link #isLinearizable(SequentialSpec, History, Function)} that requires the history to be complete - */ - public static boolean isLinearizable(SequentialSpec spec, History history) { - return isLinearizable(spec, history, o -> { throw new IllegalArgumentException("history is not complete"); }); - } - /** * Return a visual representation of the history */ diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java index 0ecd0ac27c0f0..784b82c713d3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/service/ClusterStateTaskExecutorUtils.java @@ -19,6 +19,7 @@ import java.util.function.Consumer; import java.util.stream.StreamSupport; +import static org.elasticsearch.test.ESTestCase.fail; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; @@ -36,7 +37,7 @@ public static ClusterState executeAndAssert ClusterStateTaskExecutor executor, Iterable tasks ) throws Exception { - return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> { throw new AssertionError(e); }); + return executeHandlingResults(originalState, executor, tasks, task -> {}, (task, e) -> fail(e)); } public static ClusterState executeAndThrowFirstFailure( diff --git a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java index e51116898a7ea..628ba1c8fda3b 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java +++ b/test/framework/src/main/java/org/elasticsearch/index/KnownIndexVersions.java @@ -17,5 +17,5 @@ public class KnownIndexVersions { /** * A sorted list of all known index versions */ - public static final List ALL_VERSIONS = List.copyOf(IndexVersion.getAllVersions()); + public static final List ALL_VERSIONS = List.copyOf(IndexVersions.getAllVersions()); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index ab9d80b801863..21001f0ac2fac 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -1637,12 +1637,12 @@ public static void 
recoverFromTranslog(Engine engine, Engine.TranslogRecoveryRun && executionException.getCause() instanceof IOException ioException) { throw ioException; } else { - throw new AssertionError("unexpected", e); + fail(e); } } catch (RuntimeException e) { throw e; } catch (Exception e) { - throw new AssertionError("unexpected", e); + fail(e); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 6b56418e14663..56ad35bee83d5 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -11,6 +11,7 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -37,6 +38,7 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -381,6 +383,53 @@ public final void testCacheable() throws IOException { } } + protected final List blockLoaderReadValues(DirectoryReader reader, MappedFieldType fieldType) throws IOException { + BlockLoader loader = fieldType.blockLoader(blContext()); + List all = new ArrayList<>(); + for (LeafReaderContext ctx : reader.leaves()) { + TestBlock block = (TestBlock) loader.reader(ctx).readValues(TestBlock.FACTORY, TestBlock.docs(ctx)); + for (int i = 0; i < block.size(); i++) { + all.add(block.get(i)); + } + } + return all; + } + + protected final List blockLoaderReadValuesFromSingleDoc(DirectoryReader reader, MappedFieldType fieldType) throws IOException { + BlockLoader loader = fieldType.blockLoader(blContext()); + List all = new ArrayList<>(); + for (LeafReaderContext ctx : reader.leaves()) { + BlockDocValuesReader blockReader = loader.reader(ctx); + TestBlock block = (TestBlock) blockReader.builder(TestBlock.FACTORY, ctx.reader().numDocs()); + for (int i = 0; i < ctx.reader().numDocs(); i++) { + blockReader.readValuesFromSingleDoc(i, block); + } + for (int i = 0; i < block.size(); i++) { + all.add(block.get(i)); + } + } + return all; + } + + private MappedFieldType.BlockLoaderContext blContext() { + return new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + throw new UnsupportedOperationException(); + } + + @Override + public SearchLookup lookup() { + return mockContext().lookup(); + } + + @Override + public Set sourcePaths(String name) { + throw new UnsupportedOperationException(); + } + }; + } + private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery) { Exception e = expectThrows(IllegalArgumentException.class, buildQuery); assertThat( diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java index 692f7c6810254..e34072fbf1668 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperTestCase.java @@ -31,8 +31,10 @@ import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -51,6 +53,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.Source; import org.elasticsearch.search.lookup.SourceProvider; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -70,6 +73,7 @@ import java.util.stream.IntStream; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -87,7 +91,7 @@ */ public abstract class MapperTestCase extends MapperServiceTestCase { - public static final IndexVersion DEPRECATED_BOOST_INDEX_VERSION = IndexVersion.V_7_10_0; + public static final IndexVersion DEPRECATED_BOOST_INDEX_VERSION = IndexVersions.V_7_10_0; protected abstract void minimalMapping(XContentBuilder b) throws IOException; @@ -391,7 +395,7 @@ public final void testEmptyName() { public final void testBlankName() { IndexVersion version = getVersion(); - assumeTrue("blank field names are rejected from 8.6.0 onwards", version.onOrAfter(IndexVersion.V_8_6_0)); + assumeTrue("blank field names are rejected from 8.6.0 onwards", version.onOrAfter(IndexVersions.V_8_6_0)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(version, mapping(b -> { b.startObject(" "); minimalMapping(b); @@ -528,7 +532,7 @@ public void testBoostNotAllowed() throws IOException { } protected IndexVersion boostNotAllowedIndexVersion() { - return IndexVersion.V_8_0_0; + return IndexVersions.V_8_0_0; } /** @@ -1033,10 +1037,24 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { public record SyntheticSourceExample( CheckedConsumer inputValue, CheckedConsumer result, + CheckedConsumer blockLoaderResult, CheckedConsumer mapping ) { public SyntheticSourceExample(Object inputValue, Object result, CheckedConsumer mapping) { - this(b -> b.value(inputValue), b -> b.value(result), mapping); + this(b -> b.value(inputValue), b -> b.value(result), b -> b.value(result), mapping); + } + + /** + * Create an example that returns different results from doc values + * than from synthetic source. 
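+ * For instance (a hypothetical illustration), a field could synthesize the original string {@code "1"} into _source while its block loader returns the parsed number {@code 1}.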
+ */ + public SyntheticSourceExample( + Object inputValue, + Object result, + Object blockLoaderResults, + CheckedConsumer mapping + ) { + this(b -> b.value(inputValue), b -> b.value(result), b -> b.value(blockLoaderResults), mapping); } private void buildInput(XContentBuilder b) throws IOException { @@ -1049,6 +1067,20 @@ private String expected() throws IOException { result.accept(b); return Strings.toString(b.endObject()); } + + private Object expectedParsed() throws IOException { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, expected(), false).get("field"); + } + + private String expectedBlockLoader() throws IOException { + XContentBuilder b = JsonXContent.contentBuilder().startObject().field("field"); + blockLoaderResult.accept(b); + return Strings.toString(b.endObject()); + } + + private Object expectedParsedBlockLoader() throws IOException { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, expectedBlockLoader(), false).get("field"); + } } public record SyntheticSourceInvalidExample(Matcher error, CheckedConsumer mapping) {} @@ -1077,7 +1109,7 @@ public final void testSyntheticSourceIgnoreMalformedExamples() throws IOExceptio assumeTrue("type doesn't support ignore_malformed", supportsIgnoreMalformed()); CheckedConsumer mapping = syntheticSourceSupport(true).example(1).mapping(); for (ExampleMalformedValue v : exampleMalformedValues()) { - assertSyntheticSource(new SyntheticSourceExample(v.value, v.value, mapping)); + assertSyntheticSource(new SyntheticSourceExample(v.value, v.value, v.value, mapping)); } } @@ -1208,6 +1240,99 @@ public final void testSyntheticEmptyListNoDocValuesLoader() throws IOException { assertNoDocValueLoader(b -> b.startArray("field").endArray()); } + public final void testBlockLoaderReadValues() throws IOException { + testBlockLoader(blockReader -> (TestBlock) blockReader.readValues(TestBlock.FACTORY, TestBlock.docs(0))); + } + + public final void testBlockLoaderReadValuesFromSingleDoc() throws IOException { + testBlockLoader(blockReader -> { + TestBlock block = (TestBlock) blockReader.builder(TestBlock.FACTORY, 1); + blockReader.readValuesFromSingleDoc(0, block); + return block; + }); + } + + private void testBlockLoader(CheckedFunction body) throws IOException { + SyntheticSourceExample example = syntheticSourceSupport(false).example(5); + MapperService mapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("field"); + example.mapping().accept(b); + b.endObject(); + })); + BlockLoader loader = mapper.fieldType("field").blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + throw new UnsupportedOperationException(); + } + + @Override + public SearchLookup lookup() { + throw new UnsupportedOperationException(); + } + + @Override + public Set sourcePaths(String name) { + return mapper.mappingLookup().sourcePaths(name); + } + }); + Function valuesConvert = loadBlockExpected(); + if (valuesConvert == null) { + assertNull(loader); + return; + } + try (Directory directory = newDirectory()) { + RandomIndexWriter iw = new RandomIndexWriter(random(), directory); + LuceneDocument doc = mapper.documentMapper().parse(source(b -> { + b.field("field"); + example.inputValue.accept(b); + })).rootDoc(); + iw.addDocument(doc); + iw.close(); + try (DirectoryReader reader = DirectoryReader.open(directory)) { + TestBlock block = body.apply(loader.reader(reader.leaves().get(0))); + Object inBlock = block.get(0); + if (inBlock != null) { + if (inBlock instanceof List l) { + inBlock = 
l.stream().map(valuesConvert).toList(); + } else { + inBlock = valuesConvert.apply(inBlock); + } + } + Object expected = loader instanceof BlockSourceReader ? example.expectedParsed() : example.expectedParsedBlockLoader(); + if (List.of().equals(expected)) { + assertThat(inBlock, nullValue()); + return; + } + if (expected instanceof List l) { + ListMatcher m = ListMatcher.matchesList(); + for (Object v : l) { + m = m.item(blockItemMatcher(v)); + } + assertMap((List) inBlock, m); + return; + } + @SuppressWarnings("unchecked") + Matcher e = (Matcher) blockItemMatcher(expected); + assertThat(inBlock, e); + } + } + } + + /** + * Matcher for {@link #testBlockLoaderReadValues} and {@link #testBlockLoaderReadValuesFromSingleDoc}. + */ + protected Matcher blockItemMatcher(Object expected) { + return equalTo(expected); + } + + /** + * How {@link MappedFieldType#blockLoader} should load values or {@code null} + * if that method isn't supported by the field being tested. + */ + protected Function loadBlockExpected() { + return null; + } + public final void testEmptyDocumentNoDocValueLoader() throws IOException { assumeFalse("Field will add values even if no fields are supplied", addsValueWhenNotSupplied()); assertNoDocValueLoader(b -> {}); diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java index d8cf644e87105..77391aadaa554 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MetadataMapperTestCase.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentBuilder; @@ -142,8 +143,8 @@ public final void testFixedMetaFieldsAreNotConfigurable() throws IOException { public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); - IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_6_0); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, previousVersion); + IndexVersion previousVersion = IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_6_0); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, previousVersion); assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, isSupportedOn(version)); MapperService mapperService = createMapperService(version, mapping(b -> {})); // these parameters were previously silently ignored, they will still be ignored in existing indices @@ -166,7 +167,7 @@ public void testTypeAndFriendsAreAcceptedBefore_8_6_0() throws IOException { public void testTypeAndFriendsAreDeprecatedFrom_8_6_0() throws IOException { assumeTrue("Metadata field " + fieldName() + " isn't configurable", isConfigurable()); - IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_6_0, IndexVersion.current()); + IndexVersion version = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_6_0, IndexVersion.current()); assumeTrue("Metadata field " + fieldName() + " is not supported on version " + version, 
isSupportedOn(version)); MapperService mapperService = createMapperService(version, mapping(b -> {})); // these parameters were previously silently ignored, they are now deprecated in new indices diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java new file mode 100644 index 0000000000000..298acb9519532 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestBlock.java @@ -0,0 +1,210 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.List; + +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; + +public class TestBlock + implements + BlockLoader.BooleanBuilder, + BlockLoader.BytesRefBuilder, + BlockLoader.DoubleBuilder, + BlockLoader.IntBuilder, + BlockLoader.LongBuilder, + BlockLoader.SingletonOrdinalsBuilder, + BlockLoader.Block { + public static BlockLoader.BuilderFactory FACTORY = new BlockLoader.BuilderFactory() { + @Override + public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.BooleanBuilder booleans(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefsFromDocValues(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefs(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.DoubleBuilder doublesFromDocValues(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.DoubleBuilder doubles(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.IntBuilder intsFromDocValues(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.IntBuilder ints(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.LongBuilder longs(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.Builder nulls(int expectedCount) { + return new TestBlock(null); + } + + @Override + public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { + return new TestBlock(ordinals); + } + }; + + public static final BlockLoader.Docs docs(int... 
docs) { + return new BlockLoader.Docs() { + @Override + public int count() { + return docs.length; + } + + @Override + public int get(int i) { + return docs[i]; + } + }; + } + + public static final BlockLoader.Docs docs(LeafReaderContext ctx) { + return new BlockLoader.Docs() { + @Override + public int count() { + return ctx.reader().numDocs(); + } + + @Override + public int get(int i) { + return i; + } + }; + } + + private final SortedDocValues sortedDocValues; + private final List values = new ArrayList<>(); + + private List currentPosition = null; + + private TestBlock(@Nullable SortedDocValues sortedDocValues) { + this.sortedDocValues = sortedDocValues; + } + + public Object get(int i) { + return values.get(i); + } + + public int size() { + return values.size(); + } + + @Override + public TestBlock appendNull() { + assertNull(currentPosition); + values.add(null); + return this; + } + + @Override + public TestBlock beginPositionEntry() { + assertNull(currentPosition); + currentPosition = new ArrayList<>(); + values.add(currentPosition); + return this; + } + + @Override + public TestBlock endPositionEntry() { + assertNotNull(currentPosition); + currentPosition = null; + return this; + } + + @Override + public TestBlock appendBoolean(boolean value) { + return add(value); + } + + @Override + public TestBlock appendBytesRef(BytesRef value) { + return add(BytesRef.deepCopyOf(value)); + } + + @Override + public TestBlock appendDouble(double value) { + return add(value); + } + + @Override + public TestBlock appendInt(int value) { + return add(value); + } + + @Override + public TestBlock appendLong(long value) { + return add(value); + } + + @Override + public TestBlock appendOrd(int value) { + try { + return add(sortedDocValues.lookupOrd(value)); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + @Override + public TestBlock build() { + return this; + } + + private TestBlock add(Object value) { + (currentPosition == null ? 
values : currentPosition).add(value); + return this; + } + + @Override + public void close() { + // TODO assert that we close the test blocks + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java index 6b6577553c64b..12f5989d560aa 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/RestoreOnlyRepository.java @@ -32,6 +32,7 @@ import java.util.Collection; import java.util.Collections; +import java.util.concurrent.Executor; import static java.util.Collections.emptyList; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; @@ -75,7 +76,7 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna } @Override - public void getRepositoryData(ActionListener listener) { + public void getRepositoryData(Executor responseExecutor, ActionListener listener) { final IndexId indexId = new IndexId(indexName, "blah"); listener.onResponse( new RepositoryData( diff --git a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java index 4a9a7cba2abed..6fc6b349fc989 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/recovery/AbstractIndexRecoveryIntegTestCase.java @@ -139,7 +139,7 @@ protected void checkTransientErrorsDuringRecoveryAreRetried(String recoveryActio assertFalse(stateResponse.getState().getRoutingNodes().node(blueNodeId).isEmpty()); - assertHitCount(client().prepareSearch(indexName), numDocs); + assertHitCount(prepareSearch(indexName), numDocs); logger.info("--> will temporarily interrupt recovery action between blue & red on [{}]", recoveryActionToBlock); @@ -147,14 +147,8 @@ protected void checkTransientErrorsDuringRecoveryAreRetried(String recoveryActio createSnapshotThatCanBeUsedDuringRecovery(indexName); } - MockTransportService blueTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - blueNodeName - ); - MockTransportService redTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - redNodeName - ); + final var blueTransportService = MockTransportService.getInstance(blueNodeName); + final var redTransportService = MockTransportService.getInstance(redNodeName); final AtomicBoolean recoveryStarted = new AtomicBoolean(false); final AtomicBoolean finalizeReceived = new AtomicBoolean(false); @@ -241,7 +235,7 @@ public void checkDisconnectsWhileRecovering(String recoveryActionToBlock) throws assertFalse(stateResponse.getState().getRoutingNodes().node(blueNodeId).isEmpty()); - assertHitCount(client().prepareSearch(indexName), numDocs); + assertHitCount(prepareSearch(indexName), numDocs); final boolean dropRequests = randomBoolean(); logger.info("--> will {} between blue & red on [{}]", dropRequests ? 
"drop requests" : "break connection", recoveryActionToBlock); @@ -251,14 +245,8 @@ public void checkDisconnectsWhileRecovering(String recoveryActionToBlock) throws createSnapshotThatCanBeUsedDuringRecovery(indexName); } - MockTransportService blueMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - blueNodeName - ); - MockTransportService redMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - redNodeName - ); + final var blueMockTransportService = MockTransportService.getInstance(blueNodeName); + final var redMockTransportService = MockTransportService.getInstance(redNodeName); TransportService redTransportService = internalCluster().getInstance(TransportService.class, redNodeName); TransportService blueTransportService = internalCluster().getInstance(TransportService.class, blueNodeName); final CountDownLatch requestFailed = new CountDownLatch(1); @@ -342,24 +330,15 @@ public void checkDisconnectsDuringRecovery(boolean useSnapshotBasedRecoveries) t } indexRandom(true, requests); ensureSearchable(indexName); - assertHitCount(client().prepareSearch(indexName), numDocs); + assertHitCount(prepareSearch(indexName), numDocs); if (useSnapshotBasedRecoveries) { createSnapshotThatCanBeUsedDuringRecovery(indexName); } - MockTransportService masterTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - masterNodeName - ); - MockTransportService blueMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - blueNodeName - ); - MockTransportService redMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - redNodeName - ); + final var masterTransportService = MockTransportService.getInstance(masterNodeName); + final var blueMockTransportService = MockTransportService.getInstance(blueNodeName); + final var redMockTransportService = MockTransportService.getInstance(redNodeName); redMockTransportService.addSendBehavior(blueMockTransportService, new StubbableTransport.SendRequestBehavior() { private final AtomicInteger count = new AtomicInteger(); @@ -457,7 +436,7 @@ public void sendRequest( } for (int i = 0; i < 10; i++) { - assertHitCount(client().prepareSearch(indexName), numDocs); + assertHitCount(prepareSearch(indexName), numDocs); } } @@ -500,7 +479,6 @@ private static void createSnapshotThatCanBeUsedDuringRecovery(String indexName) .put(BlobStoreRepository.USE_FOR_PEER_RECOVERY_SETTING.getKey(), true) .put("compress", false) ) - .get() ); // create snapshot diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 3dd8c74ef4a38..d2a2f865ffd7a 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -277,8 +277,8 @@ private MockNode( ) { super(NodeConstruction.prepareConstruction(environment, new MockServiceProvider() { @Override - Function pluginsServiceCtor(Environment initialEnvironment) { - return settings -> new MockPluginsService(settings, initialEnvironment, classpathPlugins); + PluginsService newPluginService(Environment environment, Settings settings) { + return new MockPluginsService(settings, environment, classpathPlugins); } }, forbidPrivateIndexSettings)); diff --git a/test/framework/src/main/java/org/elasticsearch/readiness/MockReadinessService.java 
b/test/framework/src/main/java/org/elasticsearch/readiness/MockReadinessService.java index 6b58a6ce36117..e5841071a787b 100644 --- a/test/framework/src/main/java/org/elasticsearch/readiness/MockReadinessService.java +++ b/test/framework/src/main/java/org/elasticsearch/readiness/MockReadinessService.java @@ -29,6 +29,10 @@ public class MockReadinessService extends ReadinessService { */ public static class TestPlugin extends Plugin {} + private static final int RETRIES = 3; + + private static final int RETRY_DELAY_IN_MILLIS = 10; + private static final String METHOD_NOT_MOCKED = "This method has not been mocked"; private static class MockServerSocketChannel extends ServerSocketChannel { @@ -90,20 +94,30 @@ public MockReadinessService(ClusterService clusterService, Environment environme super(clusterService, environment, MockServerSocketChannel::openMock); } - static void tcpReadinessProbeTrue(ReadinessService readinessService) { + private static boolean socketIsOpen(ReadinessService readinessService) { ServerSocketChannel mockedSocket = readinessService.serverChannel(); - if (mockedSocket == null) { - throw new AssertionError("Mocked socket not created for this node"); - } - if (mockedSocket.isOpen() == false) { - throw new AssertionError("Readiness socket should be open"); + return mockedSocket != null && mockedSocket.isOpen(); + } + + public static void tcpReadinessProbeTrue(ReadinessService readinessService) throws InterruptedException { + for (int i = 1; i <= RETRIES; ++i) { + if (socketIsOpen(readinessService)) { + return; + } + Thread.sleep(RETRY_DELAY_IN_MILLIS * i); } + + throw new AssertionError("Readiness socket should be open"); } - static void tcpReadinessProbeFalse(ReadinessService readinessService) { - ServerSocketChannel mockedSocket = readinessService.serverChannel(); - if (mockedSocket != null && mockedSocket.isOpen()) { - throw new AssertionError("Readiness socket should be closed"); + public static void tcpReadinessProbeFalse(ReadinessService readinessService) throws InterruptedException { + for (int i = 0; i < RETRIES; ++i) { + if (socketIsOpen(readinessService) == false) { + return; + } + Thread.sleep(RETRY_DELAY_IN_MILLIS * i); } + + throw new AssertionError("Readiness socket should be closed"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java index 61be2eb433618..40c336376d6f9 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.blobstore.support.BlobMetadata; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Streams; import org.elasticsearch.core.TimeValue; import org.elasticsearch.repositories.IndexId; @@ -32,6 +33,7 @@ import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.repositories.RepositoryMissingException; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; import org.elasticsearch.snapshots.SnapshotMissingException; import org.elasticsearch.snapshots.SnapshotRestoreException; import 
org.elasticsearch.test.ESIntegTestCase; @@ -67,7 +69,7 @@ public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase { public static RepositoryData getRepositoryData(Repository repository) { - return PlainActionFuture.get(repository::getRepositoryData); + return AbstractSnapshotIntegTestCase.getRepositoryData(repository); } protected abstract String repositoryType(); @@ -295,7 +297,7 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t docCounts[i] = iterations(10, 1000); logger.info("--> create random index {} with {} records", indexNames[i], docCounts[i]); addRandomDocuments(indexNames[i], docCounts[i]); - assertHitCount(client().prepareSearch(indexNames[i]).setSize(0), docCounts[i]); + assertHitCount(prepareSearch(indexNames[i]).setSize(0), docCounts[i]); } final String snapshotName = randomName(); @@ -319,7 +321,7 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t logger.info("--> add random documents to {}", index); addRandomDocuments(index, randomIntBetween(10, 1000)); } else { - int docCount = (int) client().prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; + int docCount = (int) prepareSearch(index).setSize(0).get().getHits().getTotalHits().value; int deleteCount = randomIntBetween(1, docCount); logger.info("--> delete {} random documents from {}", deleteCount, index); for (int i = 0; i < deleteCount; i++) { @@ -348,7 +350,7 @@ protected void testSnapshotAndRestore(boolean recreateRepositoryBeforeRestore) t ensureGreen(TimeValue.timeValueSeconds(120)); for (int i = 0; i < indexCount; i++) { - assertHitCount(client().prepareSearch(indexNames[i]).setSize(0), docCounts[i]); + assertHitCount(prepareSearch(indexNames[i]).setSize(0), docCounts[i]); } logger.info("--> delete snapshot {}:{}", repoName, snapshotName); @@ -392,7 +394,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { addRandomDocuments(indexName, docCount); } // Check number of documents in this iteration - docCounts[i] = (int) client().prepareSearch(indexName).setSize(0).get().getHits().getTotalHits().value; + docCounts[i] = (int) prepareSearch(indexName).setSize(0).get().getHits().getTotalHits().value; logger.info("--> create snapshot {}:{} with {} documents", repoName, snapshotName + "-" + i, docCounts[i]); assertSuccessfulSnapshot( clusterAdmin().prepareCreateSnapshot(repoName, snapshotName + "-" + i).setWaitForCompletion(true).setIndices(indexName) @@ -415,7 +417,7 @@ public void testMultipleSnapshotAndRollback() throws Exception { ); ensureGreen(); - assertHitCount(client().prepareSearch(indexName).setSize(0), docCounts[iterationToRestore]); + assertHitCount(prepareSearch(indexName).setSize(0), docCounts[iterationToRestore]); } for (int i = 0; i < iterationCount; i++) { @@ -474,7 +476,7 @@ public void testIndicesDeletedFromRepository() throws Exception { final PlainActionFuture repositoryData = PlainActionFuture.newFuture(); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> { indicesBlobContainer.set(repository.blobStore().blobContainer(repository.basePath().add("indices"))); - repository.getRepositoryData(repositoryData); + repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, repositoryData); }); for (IndexId indexId : repositoryData.actionGet().getIndices().values()) { diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java 
b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java index c51869d5b0797..8e94b3fa41fcf 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESFsBasedRepositoryIntegTestCase.java @@ -46,7 +46,7 @@ public void testMissingDirectoriesNotCreatedInReadonlyRepository() throws IOExce int docCount = iterations(10, 1000); logger.info("--> create random index {} with {} records", indexName, docCount); addRandomDocuments(indexName, docCount); - assertHitCount(client().prepareSearch(indexName).setSize(0), docCount); + assertHitCount(prepareSearch(indexName).setSize(0), docCount); final String snapshotName = randomName(); logger.info("--> create snapshot {}:{}", repoName, snapshotName); diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java index a5bed4f75a7b9..1e66dd061d9b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESMockAPIBasedRepositoryIntegTestCase.java @@ -166,7 +166,7 @@ public final void testSnapshotWithLargeSegmentFiles() throws Exception { flushAndRefresh(index); ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); final String snapshot = "snapshot"; assertSuccessfulSnapshot(clusterAdmin().prepareCreateSnapshot(repository, snapshot).setWaitForCompletion(true).setIndices(index)); @@ -175,7 +175,7 @@ public final void testSnapshotWithLargeSegmentFiles() throws Exception { assertSuccessfulRestore(clusterAdmin().prepareRestoreSnapshot(repository, snapshot).setWaitForCompletion(true)); ensureGreen(index); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); assertAcked(clusterAdmin().prepareDeleteSnapshot(repository, snapshot).get()); } @@ -193,7 +193,7 @@ public void testRequestStats() throws Exception { flushAndRefresh(index); ForceMergeResponse forceMerge = client().admin().indices().prepareForceMerge(index).setFlush(true).setMaxNumSegments(1).get(); assertThat(forceMerge.getSuccessfulShards(), equalTo(1)); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); final String snapshot = "snapshot"; assertSuccessfulSnapshot(clusterAdmin().prepareCreateSnapshot(repository, snapshot).setWaitForCompletion(true).setIndices(index)); @@ -202,7 +202,7 @@ public void testRequestStats() throws Exception { assertSuccessfulRestore(clusterAdmin().prepareRestoreSnapshot(repository, snapshot).setWaitForCompletion(true)); ensureGreen(index); - assertHitCount(client().prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); + assertHitCount(prepareSearch(index).setSize(0).setTrackTotalHits(true), nbDocs); 
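// Editor's sketch: the MockReadinessService hunk earlier in this diff replaces a one-shot socket
// assertion with a polling probe that retries a few times, sleeping a little longer before each
// attempt, and fails only once the retries are exhausted. A minimal, self-contained sketch of that
// retry shape; Probe, awaitOrFail and the parameter values are illustrative, not part of this PR.

import java.util.function.BooleanSupplier;

final class Probe {
    private Probe() {}

    /** Polls {@code condition} up to {@code retries} times, sleeping {@code delayMillis * attempt} between tries. */
    static void awaitOrFail(BooleanSupplier condition, int retries, long delayMillis, String message) throws InterruptedException {
        for (int attempt = 1; attempt <= retries; ++attempt) {
            if (condition.getAsBoolean()) {
                return; // condition holds, probe succeeds
            }
            Thread.sleep(delayMillis * attempt); // linear backoff, mirroring RETRY_DELAY_IN_MILLIS * i above
        }
        throw new AssertionError(message);
    }
}

// e.g. Probe.awaitOrFail(() -> socketIsOpen(readinessService), 3, 10, "Readiness socket should be open");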
assertAcked(clusterAdmin().prepareDeleteSnapshot(repository, snapshot).get()); diff --git a/test/framework/src/main/java/org/elasticsearch/rest/RestResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/rest/RestResponseUtils.java index 2562b53350272..dfbd7266cc4a2 100644 --- a/test/framework/src/main/java/org/elasticsearch/rest/RestResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/rest/RestResponseUtils.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; import java.io.StringWriter; @@ -45,7 +46,7 @@ public static BytesReference getBodyContent(RestResponse restResponse) { out.flush(); return out.bytes(); } catch (Exception e) { - throw new AssertionError("unexpected", e); + return ESTestCase.fail(e); } } @@ -57,7 +58,7 @@ public static String getTextBodyContent(Iterator /** * Return a random {@link GeoBoundingBox} within the bounds of the tile grid. */ - protected abstract GeoBoundingBox randomBBox(); + protected abstract GeoBoundingBox randomBBox(int precision); /** * Return the bounding tile as a {@link Rectangle} for a given point @@ -131,23 +131,24 @@ public void testUnmappedMissing() throws IOException { } public void testSingletonDocs() throws IOException { - testWithSeveralDocs(() -> true, null); + testWithSeveralDocs(() -> true, null, randomPrecision()); } public void testBoundedSingletonDocs() throws IOException { - testWithSeveralDocs(() -> true, randomBBox()); + int precision = randomPrecision(); + testWithSeveralDocs(() -> true, randomBBox(precision), precision); } public void testMultiValuedDocs() throws IOException { - testWithSeveralDocs(LuceneTestCase::rarely, null); + testWithSeveralDocs(LuceneTestCase::rarely, null, randomPrecision()); } public void testBoundedMultiValuedDocs() throws IOException { - testWithSeveralDocs(LuceneTestCase::rarely, randomBBox()); + int precision = randomPrecision(); + testWithSeveralDocs(LuceneTestCase::rarely, randomBBox(precision), precision); } - private void testWithSeveralDocs(BooleanSupplier supplier, GeoBoundingBox bbox) throws IOException { - int precision = randomPrecision(); + private void testWithSeveralDocs(BooleanSupplier supplier, GeoBoundingBox bbox, int precision) throws IOException { int numPoints = randomIntBetween(8, 128); Map expectedCountPerGeoHash = new HashMap<>(); testCase(new MatchAllDocsQuery(), FIELD_NAME, precision, bbox, geoHashGrid -> { @@ -185,23 +186,24 @@ private void testWithSeveralDocs(BooleanSupplier supplier, GeoBoundingBox bbox) } public void testSingletonDocsAsSubAgg() throws IOException { - testWithSeveralDocsAsSubAgg(() -> true, null); + testWithSeveralDocsAsSubAgg(() -> true, null, randomPrecision()); } public void testBoundedSingletonDocsAsSubAgg() throws IOException { - testWithSeveralDocsAsSubAgg(() -> true, randomBBox()); + int precision = randomPrecision(); + testWithSeveralDocsAsSubAgg(() -> true, randomBBox(precision), precision); } public void testMultiValuedDocsAsSubAgg() throws IOException { - testWithSeveralDocsAsSubAgg(LuceneTestCase::rarely, null); + testWithSeveralDocsAsSubAgg(LuceneTestCase::rarely, null, randomPrecision()); } public void testBoundedMultiValuedDocsAsSubAgg() throws IOException { - testWithSeveralDocsAsSubAgg(LuceneTestCase::rarely, randomBBox()); + int precision = randomPrecision(); + testWithSeveralDocsAsSubAgg(LuceneTestCase::rarely, 
randomBBox(precision), precision); } - private void testWithSeveralDocsAsSubAgg(BooleanSupplier supplier, GeoBoundingBox bbox) throws IOException { - int precision = randomPrecision(); + private void testWithSeveralDocsAsSubAgg(BooleanSupplier supplier, GeoBoundingBox bbox, int precision) throws IOException { int numPoints = randomIntBetween(8, 128); Map> expectedCountPerTPerGeoHash = new TreeMap<>(); TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("t").field("t").size(numPoints); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index 1af8eb8dab82b..7b4e591051e61 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -30,7 +30,7 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -229,12 +229,11 @@ public void setupSuiteScopeCluster() throws Exception { // value for NUMBER_FIELD_NAME. This will check that after random indexing each document only has 1 value for // NUMBER_FIELD_NAME and it is the correct value. Following this initial change it seems that this call was getting // more than 2000 hits (actual value was 2059) so now it will also check to ensure all hits have the correct index and type.
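// Editor's sketch: the geo-grid hunks above change randomBBox() to randomBBox(int precision) and
// hoist "int precision = randomPrecision()" out of the shared helper into each caller, so the
// bounding box and the aggregation under test are built from the same grid precision instead of
// two independent random draws. Reduced to its essentials with illustrative stand-ins (BBox,
// randomPrecision, randomBBox and runGridAggTest are not the tester's real signatures):

import java.util.Random;

final class SharedPrecisionSketch {
    record BBox(int precision) {} // pretend bounding box aligned to a tile grid at one precision

    static int randomPrecision(Random random) {
        return 1 + random.nextInt(8); // assumed range; the real tester delegates this to subclasses
    }

    static BBox randomBBox(Random random, int precision) {
        return new BBox(precision); // a real implementation sizes the box to tiles at this precision
    }

    static void boundedTestCase(Random random) {
        int precision = randomPrecision(random); // drawn exactly once...
        BBox bbox = randomBBox(random, precision); // ...and shared by the bbox and the aggregation
        runGridAggTest(bbox, precision);
    }

    static void runGridAggTest(BBox bbox, int precision) {
        if (bbox.precision() != precision) { // the invariant the new signature guarantees
            throw new AssertionError("bounding box and aggregation disagree on precision");
        }
    }
}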
- SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) - .addStoredField(NUMBER_FIELD_NAME) + SearchResponse response = prepareSearch(HIGH_CARD_IDX_NAME).addStoredField(NUMBER_FIELD_NAME) .addSort(SortBuilders.fieldSort(NUMBER_FIELD_NAME).order(SortOrder.ASC)) .setSize(5000) .get(); - assertSearchResponse(response); + assertNoFailures(response); long totalHits = response.getHits().getTotalHits().value; XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index f211216226285..664590d65c818 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -18,7 +17,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -34,99 +33,105 @@ public abstract class CentroidAggregationTestBase extends AbstractGeoTestCase { protected abstract ValuesSourceAggregationBuilder centroidAgg(String name); public void testEmptyAggregation() { - SearchResponse response = client().prepareSearch(EMPTY_IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) - .get(); - assertSearchResponse(response); - - CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); - assertThat(geoCentroid, notNullValue()); - assertThat(geoCentroid.getName(), equalTo(aggName())); - assertThat(geoCentroid.centroid(), equalTo(null)); - assertEquals(0, geoCentroid.count()); + assertNoFailuresAndResponse( + client().prepareSearch(EMPTY_IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), + response -> { + CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); + assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertThat(geoCentroid.centroid(), equalTo(null)); + assertEquals(0, geoCentroid.count()); + } + ); + } public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch(UNMAPPED_IDX_NAME) - .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) - .get(); - assertSearchResponse(response); - - CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(geoCentroid, notNullValue()); - 
assertThat(geoCentroid.getName(), equalTo(aggName())); - assertThat(geoCentroid.centroid(), equalTo(null)); - assertEquals(0, geoCentroid.count()); + assertNoFailuresAndResponse( + client().prepareSearch(UNMAPPED_IDX_NAME).addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), + response -> { + CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertThat(geoCentroid.centroid(), equalTo(null)); + assertEquals(0, geoCentroid.count()); + } + ); } public void testPartiallyUnmapped() { - SearchResponse response = client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME) - .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) - .get(); - assertSearchResponse(response); - - CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(geoCentroid, notNullValue()); - assertThat(geoCentroid.getName(), equalTo(aggName())); - assertSameCentroid(geoCentroid.centroid(), singleCentroid); - assertEquals(numDocs, geoCentroid.count()); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME).addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), + response -> { + CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertSameCentroid(geoCentroid.centroid(), singleCentroid); + assertEquals(numDocs, geoCentroid.count()); + } + ); + } public void testSingleValuedField() { - SearchResponse response = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)) - .get(); - assertSearchResponse(response); - - CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(geoCentroid, notNullValue()); - assertThat(geoCentroid.getName(), equalTo(aggName())); - assertSameCentroid(geoCentroid.centroid(), singleCentroid); - assertEquals(numDocs, geoCentroid.count()); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), + response -> { + CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertSameCentroid(geoCentroid.centroid(), singleCentroid); + assertEquals(numDocs, geoCentroid.count()); + } + ); } public void testSingleValueFieldGetProperty() { - SearchResponse response = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME))) - .get(); - assertSearchResponse(response); - - Global global = response.getAggregations().get("global"); - assertThat(global, notNullValue()); - assertThat(global.getName(), equalTo("global")); - assertThat(global.getDocCount(), equalTo((long) numDocs)); - assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); - - CentroidAggregation geoCentroid = global.getAggregations().get(aggName()); - InternalAggregation agg = (InternalAggregation) global; - assertThat(geoCentroid, notNullValue()); - assertThat(geoCentroid.getName(), equalTo(aggName())); - assertThat((CentroidAggregation) agg.getProperty(aggName()), 
sameInstance(geoCentroid)); - assertSameCentroid(geoCentroid.centroid(), singleCentroid); - assertSimilarValue(((SpatialPoint) agg.getProperty(aggName() + ".value")).getY(), singleCentroid.getY()); - assertSimilarValue(((SpatialPoint) agg.getProperty(aggName() + ".value")).getX(), singleCentroid.getX()); - assertSimilarValue((double) agg.getProperty(aggName() + "." + coordinateName("y")), singleCentroid.getY()); - assertSimilarValue((double) agg.getProperty(aggName() + "." + coordinateName("x")), singleCentroid.getX()); - assertEquals(numDocs, (long) ((InternalAggregation) global).getProperty(aggName() + ".count")); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME))), + response -> { + Global global = response.getAggregations().get("global"); + assertThat(global, notNullValue()); + assertThat(global.getName(), equalTo("global")); + assertThat(global.getDocCount(), equalTo((long) numDocs)); + assertThat(global.getAggregations(), notNullValue()); + assertThat(global.getAggregations().asMap().size(), equalTo(1)); + + CentroidAggregation geoCentroid = global.getAggregations().get(aggName()); + InternalAggregation agg = (InternalAggregation) global; + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertThat((CentroidAggregation) agg.getProperty(aggName()), sameInstance(geoCentroid)); + assertSameCentroid(geoCentroid.centroid(), singleCentroid); + assertSimilarValue(((SpatialPoint) agg.getProperty(aggName() + ".value")).getY(), singleCentroid.getY()); + assertSimilarValue(((SpatialPoint) agg.getProperty(aggName() + ".value")).getX(), singleCentroid.getX()); + assertSimilarValue((double) agg.getProperty(aggName() + "." + coordinateName("y")), singleCentroid.getY()); + assertSimilarValue((double) agg.getProperty(aggName() + "." + coordinateName("x")), singleCentroid.getX()); + assertEquals(numDocs, (long) ((InternalAggregation) global).getProperty(aggName() + ".count")); + } + ); } public void testMultiValuedField() throws Exception { - SearchResponse searchResponse = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(centroidAgg(aggName()).field(MULTI_VALUED_FIELD_NAME)) - .get(); - assertSearchResponse(searchResponse); - - CentroidAggregation geoCentroid = searchResponse.getAggregations().get(aggName()); - assertThat(geoCentroid, notNullValue()); - assertThat(geoCentroid.getName(), equalTo(aggName())); - assertSameCentroid(geoCentroid.centroid(), multiCentroid); - assertEquals(2 * numDocs, geoCentroid.count()); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(centroidAgg(aggName()).field(MULTI_VALUED_FIELD_NAME)), + response -> { + CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); + assertThat(geoCentroid, notNullValue()); + assertThat(geoCentroid.getName(), equalTo(aggName())); + assertSameCentroid(geoCentroid.centroid(), multiCentroid); + assertEquals(2 * numDocs, geoCentroid.count()); + } + ); } /** Override this if the spatial data uses different coordinate names (eg. 
Geo uses lon/lat instead of x/y) */ diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index ac366576c8975..81c9c37ad4f9a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.aggregations.metrics; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.SpatialPoint; import org.elasticsearch.common.util.BigArray; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -23,7 +22,7 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -39,171 +38,176 @@ public abstract class SpatialBoundsAggregationTestBase e protected abstract void assertBoundsLimits(SpatialBounds spatialBounds); public void testSingleValuedField() throws Exception { - SearchResponse response = client().prepareSearch(IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)).get(); - - assertSearchResponse(response); - - SpatialBounds geoBounds = response.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); - assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), + response -> { + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); + assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); + } + ); + } public void testSingleValuedField_getProperty() { - SearchResponse searchResponse = client().prepareSearch(IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(global("global").subAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME))) - .get(); - - assertSearchResponse(searchResponse); - - Global global = searchResponse.getAggregations().get("global"); - assertThat(global, notNullValue()); - 
assertThat(global.getName(), equalTo("global")); - assertThat(global.getDocCount(), equalTo((long) numDocs)); - assertThat(global.getAggregations(), notNullValue()); - assertThat(global.getAggregations().asMap().size(), equalTo(1)); - - SpatialBounds geobounds = global.getAggregations().get(aggName()); - assertThat(geobounds, notNullValue()); - assertThat(geobounds.getName(), equalTo(aggName())); - assertThat((SpatialBounds) ((InternalAggregation) global).getProperty(aggName()), sameInstance(geobounds)); - T topLeft = geobounds.topLeft(); - T bottomRight = geobounds.bottomRight(); - assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); - assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); - assertThat( - (double) ((InternalAggregation) global).getProperty(aggName() + ".top"), - closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE) - ); - assertThat( - (double) ((InternalAggregation) global).getProperty(aggName() + ".left"), - closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE) - ); - assertThat( - (double) ((InternalAggregation) global).getProperty(aggName() + ".bottom"), - closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE) - ); - assertThat( - (double) ((InternalAggregation) global).getProperty(aggName() + ".right"), - closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE) + + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(global("global").subAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME))), + response -> { + Global global = response.getAggregations().get("global"); + assertThat(global, notNullValue()); + assertThat(global.getName(), equalTo("global")); + assertThat(global.getDocCount(), equalTo((long) numDocs)); + assertThat(global.getAggregations(), notNullValue()); + assertThat(global.getAggregations().asMap().size(), equalTo(1)); + + SpatialBounds geobounds = global.getAggregations().get(aggName()); + assertThat(geobounds, notNullValue()); + assertThat(geobounds.getName(), equalTo(aggName())); + assertThat((SpatialBounds) ((InternalAggregation) global).getProperty(aggName()), sameInstance(geobounds)); + T topLeft = geobounds.topLeft(); + T bottomRight = geobounds.bottomRight(); + assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); + assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName() + ".top"), + closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE) + ); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName() + ".left"), + closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE) + ); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName() + ".bottom"), + closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE) + ); + assertThat( + (double) ((InternalAggregation) global).getProperty(aggName() + ".right"), + closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE) + ); + } ); + } public void testMultiValuedField() throws Exception { - SearchResponse response = client().prepareSearch(IDX_NAME).addAggregation(boundsAgg(aggName(), MULTI_VALUED_FIELD_NAME)).get(); - - 
assertSearchResponse(response); - - SpatialBounds geoBounds = response.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft.getY(), closeTo(multiTopLeft.getY(), GEOHASH_TOLERANCE)); - assertThat(topLeft.getX(), closeTo(multiTopLeft.getX(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getY(), closeTo(multiBottomRight.getY(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getX(), closeTo(multiBottomRight.getX(), GEOHASH_TOLERANCE)); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME).addAggregation(boundsAgg(aggName(), MULTI_VALUED_FIELD_NAME)), + response -> { + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft.getY(), closeTo(multiTopLeft.getY(), GEOHASH_TOLERANCE)); + assertThat(topLeft.getX(), closeTo(multiTopLeft.getX(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getY(), closeTo(multiBottomRight.getY(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getX(), closeTo(multiBottomRight.getX(), GEOHASH_TOLERANCE)); + } + ); } public void testUnmapped() throws Exception { - SearchResponse response = client().prepareSearch(UNMAPPED_IDX_NAME) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)) - .get(); - - assertSearchResponse(response); - - SpatialBounds geoBounds = response.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft, equalTo(null)); - assertThat(bottomRight, equalTo(null)); + assertNoFailuresAndResponse( + client().prepareSearch(UNMAPPED_IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), + response -> { + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft, equalTo(null)); + assertThat(bottomRight, equalTo(null)); + } + ); } public void testPartiallyUnmapped() throws Exception { - SearchResponse response = client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)) - .get(); - - assertSearchResponse(response); - - SpatialBounds geoBounds = response.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); - assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); - assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_NAME, UNMAPPED_IDX_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), + response -> { + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + 
T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft.getY(), closeTo(singleTopLeft.getY(), GEOHASH_TOLERANCE)); + assertThat(topLeft.getX(), closeTo(singleTopLeft.getX(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getY(), closeTo(singleBottomRight.getY(), GEOHASH_TOLERANCE)); + assertThat(bottomRight.getX(), closeTo(singleBottomRight.getX(), GEOHASH_TOLERANCE)); + } + ); + } public void testEmptyAggregation() throws Exception { - SearchResponse searchResponse = client().prepareSearch(EMPTY_IDX_NAME) - .setQuery(matchAllQuery()) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)) - .get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - SpatialBounds geoBounds = searchResponse.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft, equalTo(null)); - assertThat(bottomRight, equalTo(null)); + assertNoFailuresAndResponse( + client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft, equalTo(null)); + assertThat(bottomRight, equalTo(null)); + } + ); + } /** * This test forces the bounds {@link MetricsAggregator} to resize the {@link BigArray}s it uses to ensure they are resized correctly */ public void testSingleValuedFieldAsSubAggToHighCardTermsAgg() { - SearchResponse response = client().prepareSearch(HIGH_CARD_IDX_NAME) - .addAggregation(terms("terms").field(NUMBER_FIELD_NAME).subAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME))) - .get(); - - assertSearchResponse(response); - - Terms terms = response.getAggregations().get("terms"); - assertThat(terms, notNullValue()); - assertThat(terms.getName(), equalTo("terms")); - List buckets = terms.getBuckets(); - assertThat(buckets.size(), equalTo(10)); - for (int i = 0; i < 10; i++) { - Bucket bucket = buckets.get(i); - assertThat(bucket, notNullValue()); - assertThat("InternalBucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1L)); - SpatialBounds geoBounds = bucket.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - assertBoundsLimits(geoBounds); - } + + assertNoFailuresAndResponse( + client().prepareSearch(HIGH_CARD_IDX_NAME) + .addAggregation(terms("terms").field(NUMBER_FIELD_NAME).subAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME))), + response -> { + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(10)); + for (int i = 0; i < 10; i++) { + Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat("InternalBucket " + bucket.getKey() + " has wrong number of documents", bucket.getDocCount(), equalTo(1L)); + SpatialBounds geoBounds = bucket.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), 
equalTo(aggName())); + assertBoundsLimits(geoBounds); + } + } + ); } public void testSingleValuedFieldWithZeroLon() { - SearchResponse response = client().prepareSearch(IDX_ZERO_NAME) - .addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)) - .get(); - - assertSearchResponse(response); - - SpatialBounds geoBounds = response.getAggregations().get(aggName()); - assertThat(geoBounds, notNullValue()); - assertThat(geoBounds.getName(), equalTo(aggName())); - T topLeft = geoBounds.topLeft(); - T bottomRight = geoBounds.bottomRight(); - assertThat(topLeft.getY(), closeTo(1.0, GEOHASH_TOLERANCE)); - assertThat(topLeft.getX(), closeTo(0.0, GEOHASH_TOLERANCE)); - assertThat(bottomRight.getY(), closeTo(1.0, GEOHASH_TOLERANCE)); - assertThat(bottomRight.getX(), closeTo(0.0, GEOHASH_TOLERANCE)); + assertNoFailuresAndResponse( + client().prepareSearch(IDX_ZERO_NAME).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), + response -> { + SpatialBounds geoBounds = response.getAggregations().get(aggName()); + assertThat(geoBounds, notNullValue()); + assertThat(geoBounds.getName(), equalTo(aggName())); + T topLeft = geoBounds.topLeft(); + T bottomRight = geoBounds.bottomRight(); + assertThat(topLeft.getY(), closeTo(1.0, GEOHASH_TOLERANCE)); + assertThat(topLeft.getX(), closeTo(0.0, GEOHASH_TOLERANCE)); + assertThat(bottomRight.getY(), closeTo(1.0, GEOHASH_TOLERANCE)); + assertThat(bottomRight.getX(), closeTo(0.0, GEOHASH_TOLERANCE)); + } + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 69698bc535b06..6abce04556316 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -47,7 +47,7 @@ import java.util.Map; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -115,7 +115,7 @@ public void testIndexPointsFilterRectangle() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); @@ -123,7 +123,7 @@ public void testIndexPointsFilterRectangle() throws Exception { // default query, without specifying relation (expect intersects) searchResponse = client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, geometry)).get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); @@ -181,7 +181,7 @@ public void testIndexPointsPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, 
polygon).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); SearchHits searchHits = searchResponse.getHits(); assertThat(searchHits.getTotalHits().value, equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); @@ -223,7 +223,7 @@ public void testIndexPointsMultiPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); @@ -234,7 +234,7 @@ public void testIndexPointsMultiPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.WITHIN)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(2)); assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); @@ -245,7 +245,7 @@ public void testIndexPointsMultiPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.DISJOINT)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); @@ -255,7 +255,7 @@ public void testIndexPointsMultiPolygon() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiPolygon).relation(ShapeRelation.CONTAINS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); } @@ -283,7 +283,7 @@ public void testIndexPointsRectangle() throws Exception { .setQuery(queryBuilder().shapeQuery(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); @@ -341,7 +341,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { ) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("point2")); @@ -354,7 +354,7 @@ public void testIndexPointsIndexedRectangle() throws Exception { .indexedShapePath(indexedShapePath) ) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); } @@ -621,7 +621,7 @@ public void testIndexPointsFromLine() throws Exception { .setTrackTotalHits(true) .setQuery(queryBuilder().shapeQuery(defaultFieldName, line).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); 
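// Editor's sketch: the hunks above (and in the aggregation test bases earlier in this diff) migrate
// tests from "call .get(), then assert on the SearchResponse" to consumer-style helpers such as
// assertNoFailures(...) and assertNoFailuresAndResponse(builder, response -> { ... }), which keep
// the execute / check-shard-failures / assert sequence in one place and scope the response to the
// callback. A rough sketch of that shape; Response, failedShards and the generics here are
// simplified stand-ins, not the actual signatures in ElasticsearchAssertions.

import java.util.function.Consumer;
import java.util.function.Supplier;

final class ConsumerAssertSketch {
    interface Response {
        int failedShards(); // stand-in for SearchResponse's shard-failure accounting
    }

    static <R extends Response> void assertNoFailuresAndResponse(Supplier<R> execute, Consumer<R> assertions) {
        R response = execute.get(); // run the search
        if (response.failedShards() > 0) {
            throw new AssertionError("search failed on " + response.failedShards() + " shard(s)");
        }
        assertions.accept(response); // the caller's checks run while the helper still scopes the response
    }
}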
SearchHits searchHits = searchResponse.getHits(); assertThat(searchHits.getTotalHits().value, equalTo((long) line.length())); } @@ -647,7 +647,7 @@ public void testIndexPointsFromPolygon() throws Exception { .setTrackTotalHits(true) .setQuery(queryBuilder().shapeQuery(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)) .get(); - assertSearchResponse(searchResponse); + assertNoFailures(searchResponse); SearchHits searchHits = searchResponse.getHits(); assertThat(searchHits.getTotalHits().value, equalTo((long) linearRing.length())); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java index 3e9d840cb7ed8..97e68507f0c55 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeIntegTestCase.java @@ -12,7 +12,6 @@ import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; @@ -52,6 +51,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; @@ -184,8 +185,7 @@ public void testIgnoreMalformed() throws Exception { ); indexRandom(true, client().prepareIndex("test").setId("0").setSource("shape", polygonGeoJson)); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(matchAllQuery()).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(client().prepareSearch("test").setQuery(matchAllQuery()), 1L); } /** @@ -216,12 +216,11 @@ public void testIndexShapeRouting() throws Exception { }"""; indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON).setRouting("ABC")); - - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(queryBuilder().shapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")) - .get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch("test") + .setQuery(queryBuilder().shapeQuery("shape", "0").indexedShapeIndex("test").indexedShapeRouting("ABC")), + 1L + ); } public void testDisallowExpensiveQueries() throws InterruptedException, IOException { @@ -251,7 +250,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept SearchRequestBuilder builder = client().prepareSearch("test") .setQuery(queryBuilder().shapeQuery("shape", new Circle(0, 0, 77000))); if (allowExpensiveQueries()) { - assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(builder, 1L); } else { 
ElasticsearchException e = expectThrows(ElasticsearchException.class, builder::get); assertEquals( @@ -267,7 +266,7 @@ public void testDisallowExpensiveQueries() throws InterruptedException, IOExcept // Set search.allow_expensive_queries to "true" updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", true)); - assertThat(builder.get().getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(builder, 1L); } finally { updateClusterSettings(Settings.builder().put("search.allow_expensive_queries", (String) null)); } @@ -304,12 +303,13 @@ public void testShapeRelations() throws Exception { client().admin().indices().prepareRefresh().get(); // Point in polygon - SearchResponse result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(queryBuilder().intersectionQuery("area", new Point(3, 3))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); + assertResponse( + client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(3, 3))), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("1")); + } + ); // Point in polygon hole assertHitCount( @@ -322,20 +322,24 @@ public void testShapeRelations() throws Exception { // of the polygon NOT the hole // Point on polygon border - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(queryBuilder().intersectionQuery("area", new Point(10.0, 5.0))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); + assertResponse( + client().prepareSearch() + .setQuery(matchAllQuery()) + .setPostFilter(queryBuilder().intersectionQuery("area", new Point(10.0, 5.0))), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("1")); + } + ); // Point on hole border - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(queryBuilder().intersectionQuery("area", new Point(5.0, 2.0))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); + assertResponse( + client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(5.0, 2.0))), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("1")); + } + ); // Point not in polygon assertHitCount( @@ -344,12 +348,13 @@ public void testShapeRelations() throws Exception { ); // Point in polygon hole - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(queryBuilder().disjointQuery("area", new Point(4.5, 4.5))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("1")); + assertResponse( + client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().disjointQuery("area", new Point(4.5, 4.5))), + response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("1")); + } + ); // Create a polygon that fills the empty area of the polygon defined above Polygon inverse = new Polygon( @@ -362,12 +367,13 @@ public void testShapeRelations() throws Exception { client().admin().indices().prepareRefresh().get(); // re-check point on polygon hole - result = client().prepareSearch() - .setQuery(matchAllQuery()) - .setPostFilter(queryBuilder().intersectionQuery("area", new Point(4.5, 4.5))) - .get(); - assertHitCount(result, 1); - assertFirstHit(result, hasId("2")); + assertResponse( + client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(4.5, 4.5))), + 
response -> { + assertHitCount(response, 1L); + assertFirstHit(response, hasId("2")); + } + ); // Polygon WithIn Polygon Polygon WithIn = new Polygon(new LinearRing(new double[] { -30, -30, 30, 30, -30 }, new double[] { -30, 30, 30, -30, -30 })); @@ -393,7 +399,7 @@ public void testShapeRelations() throws Exception { assertHitCount( client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(174, -4))), - 1 + 1L ); // In geo coordinates the polygon wraps the dateline, so we need to search within valid longitude ranges @@ -402,15 +408,15 @@ public void testShapeRelations() throws Exception { client().prepareSearch() .setQuery(matchAllQuery()) .setPostFilter(queryBuilder().intersectionQuery("area", new Point(xWrapped, -4))), - 1 + 1L ); assertHitCount( client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(180, -4))), - 0 + 0L ); assertHitCount( client().prepareSearch().setQuery(matchAllQuery()).setPostFilter(queryBuilder().intersectionQuery("area", new Point(180, -6))), - 1 + 1L ); } @@ -435,16 +441,15 @@ public void testBulk() throws Exception { assertFalse("unable to index data: " + item.getFailureMessage(), item.isFailed()); } - client().admin().indices().prepareRefresh().get(); + assertNoFailures(client().admin().indices().prepareRefresh().get()); String key = "DE"; - SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("_id", key)).get(); - - assertHitCount(searchResponse, 1); - - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), equalTo(key)); - } + assertResponse(client().prepareSearch().setQuery(matchQuery("_id", key)), response -> { + assertHitCount(response, 1); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), equalTo(key)); + } + }); // We extract this to another method to allow some tests to ignore this part doDistanceAndBoundingBoxTest(key); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index b1319e59e8515..5da9103d49771 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.geo; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.ShapeRelation; @@ -29,7 +28,6 @@ import org.elasticsearch.index.query.AbstractGeometryQueryBuilder; import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -43,7 +41,8 @@ import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -93,9 +92,7 @@ public void testFieldAlias() throws IOException { .setSource(GeoJson.toXContent(multiPoint, jsonBuilder().startObject().field(defaultFieldName), null).endObject()) .setRefreshPolicy(IMMEDIATE) .get(); - - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery("alias", multiPoint)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery("alias", multiPoint)), 1L); } public void testShapeFetchingPath() throws Exception { @@ -149,50 +146,40 @@ public void testShapeFetchingPath() throws Exception { .relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath(defaultFieldName); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); + filter = queryBuilder().shapeQuery(defaultFieldName, "1") .relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.geo"); - result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); + filter = queryBuilder().shapeQuery(defaultFieldName, "1") .relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.geo"); - result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); + filter = queryBuilder().shapeQuery(defaultFieldName, "1") .relation(ShapeRelation.INTERSECTS) .indexedShapeIndex("shapes") .indexedShapePath("1.2.3.geo"); - result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); // now test the query variant QueryBuilder query = queryBuilder().shapeQuery(defaultFieldName, "1") .indexedShapeIndex("shapes") .indexedShapePath(defaultFieldName); - result = client().prepareSearch(defaultIndexName).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(query), 1L); + query = queryBuilder().shapeQuery(defaultFieldName, "1").indexedShapeIndex("shapes").indexedShapePath("1.geo"); - result = client().prepareSearch(defaultIndexName).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(query), 1L); + query = queryBuilder().shapeQuery(defaultFieldName, "1").indexedShapeIndex("shapes").indexedShapePath("1.2.geo"); - result = 
client().prepareSearch(defaultIndexName).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(query), 1L); + query = queryBuilder().shapeQuery(defaultFieldName, "1").indexedShapeIndex("shapes").indexedShapePath("1.2.3.geo"); - result = client().prepareSearch(defaultIndexName).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(query), 1L); } public void testRandomGeoCollectionQuery() throws Exception { @@ -219,9 +206,9 @@ public void testRandomGeoCollectionQuery() throws Exception { GeometryCollection queryCollection = new GeometryCollection<>(queryGeometries); QueryBuilder intersects = queryBuilder().intersectionQuery(defaultFieldName, queryCollection); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(intersects).get(); - assertSearchResponse(result); - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), result.getHits().getTotalHits().value > 0); + assertNoFailuresAndResponse(client().prepareSearch(defaultIndexName).setQuery(intersects), response -> { + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + }); } public void testGeometryCollectionRelations() throws Exception { @@ -243,18 +230,24 @@ public void testGeometryCollectionRelations() throws Exception { geometries.add(new Point(1, 2)); geometries.add(new Point(-2, -1)); GeometryCollection collection = new GeometryCollection<>(geometries); - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)), + 1L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.INTERSECTS)), + 1L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)), + 0L + ); } { // A geometry collection that is partially within the indexed shape @@ -262,18 +255,24 @@ public void testGeometryCollectionRelations() throws Exception { geometries.add(new Point(1, 2)); geometries.add(new Point(20, 30)); GeometryCollection collection = new GeometryCollection<>(geometries); - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, 
collection).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)), + 0L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.INTERSECTS)), + 1L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)), + 0L + ); } { // A geometry collection that is disjoint with the indexed shape @@ -281,18 +280,24 @@ public void testGeometryCollectionRelations() throws Exception { geometries.add(new Point(-20, -30)); geometries.add(new Point(20, 30)); GeometryCollection collection = new GeometryCollection<>(geometries); - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.CONTAINS)), + 0L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.INTERSECTS)), + 0L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, collection).relation(ShapeRelation.DISJOINT)), + 1L + ); } } @@ -346,14 +351,15 @@ public void testEdgeCases() throws Exception { // This search would fail if both geoshape indexing and geoshape filtering // used the bottom-level optimization in SpatialPrefixTree#recursiveGetNodes. 
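The rewrites in this file all follow one shape: instead of calling .get() and holding a SearchResponse locally, the SearchRequestBuilder is handed to a helper that executes it and passes the response to a consumer (assertNoFailuresAndResponse), or to a one-liner for the common "no failures, n hits" case (assertHitCountAndNoFailures). A minimal sketch of both is below, assuming the real helpers in ElasticsearchAssertions also take care of releasing the response after the consumer returns, which is the main reason tests hand over a lambda rather than keep the response around:

```java
import java.util.function.Consumer;

import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;

// Hedged sketch of the consumer-style helpers adopted by these hunks; the
// response-lifecycle plumbing of the real ElasticsearchAssertions is omitted.
public final class ConsumerAssertSketch {

    private ConsumerAssertSketch() {}

    public static void assertNoFailuresAndResponse(SearchRequestBuilder builder, Consumer<SearchResponse> consumer) {
        SearchResponse response = builder.get();
        // Fail fast on shard failures before the test body inspects the hits.
        if (response.getShardFailures().length > 0) {
            throw new AssertionError("unexpected shard failures: " + response.getShardFailures()[0]);
        }
        consumer.accept(response);
    }

    public static void assertHitCountAndNoFailures(SearchRequestBuilder builder, long expectedHitCount) {
        assertNoFailuresAndResponse(builder, response -> {
            long hits = response.getHits().getTotalHits().value;
            if (hits != expectedHitCount) {
                throw new AssertionError("expected [" + expectedHitCount + "] hits but got [" + hits + "]");
            }
        });
    }
}
```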
- SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)) - .get(); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, query)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); + } + ); - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("blakely")); } public void testIndexedShapeReferenceSourceDisabled() throws Exception { @@ -392,11 +398,10 @@ public void testPointQuery() throws Exception { .endObject(); client().prepareIndex(defaultIndexName).setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); - SearchResponse result = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().intersectionQuery(defaultFieldName, point)) - .get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, point)), + 1L + ); } public void testContainsShapeQuery() throws Exception { @@ -407,9 +412,8 @@ public void testContainsShapeQuery() throws Exception { XContentBuilder docSource = GeoJson.toXContent(polygon, jsonBuilder().startObject().field(defaultFieldName), null).endObject(); client().prepareIndex(defaultIndexName).setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); QueryBuilder filter = queryBuilder().shapeQuery(defaultFieldName, innerPolygon).relation(ShapeRelation.CONTAINS); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(filter).get(); - assertSearchResponse(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(filter), 1L); } public void testExistsQuery() throws Exception { @@ -423,9 +427,7 @@ public void testExistsQuery() throws Exception { client().prepareIndex(defaultIndexName).setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); ExistsQueryBuilder eqb = existsQuery(defaultFieldName); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(eqb).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(eqb), 1L); } public void testIndexedShapeReference() throws Exception { @@ -457,23 +459,23 @@ public void testIndexedShapeReference() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")) - .get(); - - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); - - searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")) - .get(); + assertNoFailuresAndResponse( + 
client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, "Big_Rectangle")), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().shapeQuery(defaultFieldName, "Big_Rectangle")), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testQueryRandomGeoCollection() throws Exception { @@ -489,11 +491,10 @@ public void testQueryRandomGeoCollection() throws Exception { XContentBuilder docSource = GeoJson.toXContent(gcb, jsonBuilder().startObject().field(defaultFieldName), null).endObject(); client().prepareIndex(defaultIndexName).setId("1").setSource(docSource).setRefreshPolicy(IMMEDIATE).get(); - SearchResponse result = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().intersectionQuery(defaultFieldName, polygon)) - .get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, polygon)), + 1L + ); } public void testShapeFilterWithDefinedGeoCollection() throws Exception { @@ -538,28 +539,20 @@ public void testShapeFilterWithDefinedGeoCollection() throws Exception { { QueryBuilder filter = queryBuilder().intersectionQuery(defaultFieldName, new GeometryCollection<>(List.of(polygon1))); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); } { QueryBuilder filter = queryBuilder().intersectionQuery(defaultFieldName, new GeometryCollection<>(List.of(polygon2))); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 0); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 0L); } { QueryBuilder filter = queryBuilder().intersectionQuery(defaultFieldName, new GeometryCollection<>(List.of(polygon1, polygon2))); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 1L); } { // no shape QueryBuilder filter = queryBuilder().shapeQuery(defaultFieldName, GeometryCollection.EMPTY); - SearchResponse result = client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 0); + 
assertHitCountAndNoFailures(client().prepareSearch(defaultIndexName).setQuery(matchAllQuery()).setPostFilter(filter), 0L); } } @@ -592,22 +585,29 @@ public void testDistanceQuery() throws Exception { ).setRefreshPolicy(IMMEDIATE) ).actionGet(); - SearchResponse response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.WITHIN)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.WITHIN)), + 2L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), + 2L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.DISJOINT)), + 2L + ); + + assertHitCount( + client().prepareSearch(defaultIndexName) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, circle).relation(ShapeRelation.CONTAINS)), + 0L + ); } public void testIndexLineQueryPoints() throws Exception { @@ -623,13 +623,12 @@ public void testIndexLineQueryPoints() throws Exception { // all points from a line intersect with the line for (int i = 0; i < line.length(); i++) { Point point = new Point(line.getLon(i), line.getLat(i)); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.INTERSECTS)) - .get(); - assertSearchResponse(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.INTERSECTS)), + 1L + ); } } @@ -648,13 +647,12 @@ public void testIndexPolygonQueryPoints() throws Exception { LinearRing linearRing = polygon.getPolygon(); for (int i = 0; i < linearRing.length(); i++) { Point point = new Point(linearRing.getLon(i), linearRing.getLat(i)); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.INTERSECTS)) - .get(); - assertSearchResponse(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, point).relation(ShapeRelation.INTERSECTS)), + 1L + ); } } @@ -681,13 +679,12 @@ public 
void testNeighbours() throws Exception { .get(); } Geometry center = WellKnownText.fromWKT(StandardValidator.instance(false), false, polygons[0]); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setTrackTotalHits(true) - .setQuery(queryBuilder().shapeQuery(defaultFieldName, center).relation(ShapeRelation.INTERSECTS)) - .get(); - assertSearchResponse(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo((long) polygons.length)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setTrackTotalHits(true) + .setQuery(queryBuilder().shapeQuery(defaultFieldName, center).relation(ShapeRelation.INTERSECTS)), + polygons.length + ); } protected abstract Line makeRandomLine(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index 96b7ff7a8a38a..7cc53560e8403 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.search.geo; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; @@ -26,6 +25,8 @@ import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -103,60 +104,79 @@ public void testSimpleBoundingBoxTest() throws Exception { client().admin().indices().prepareRefresh().get(); - SearchResponse searchResponse = client().prepareSearch() // from NY - .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); - } - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); - } - - searchResponse = client().prepareSearch() // top == bottom && left == right - .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), equalTo("1")); - } - - searchResponse = 
client().prepareSearch() // top == bottom - .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), equalTo("2")); - } - - searchResponse = client().prepareSearch() // left == right - .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), equalTo("2")); - } + assertResponse( + client().prepareSearch() // from NY + .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); + } + } + ); + + assertResponse( + client().prepareSearch() // from NY + .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); + } + } + ); + + assertResponse( + client().prepareSearch() // top == bottom && left == right + .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), equalTo("1")); + } + } + ); + + assertResponse( + client().prepareSearch() // top == bottom + .setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), equalTo("2")); + } + } + ); + + assertResponse( + client().prepareSearch() // left == right + .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), equalTo("2")); + } + } + ); // Distance query - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); - } + assertResponse( + client().prepareSearch() // from NY + .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), + response -> { + 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + for (SearchHit hit : response.getHits()) { + assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); + } + } + ); + } public void testLimit2BoundingBox() throws Exception { @@ -189,121 +209,128 @@ public void testLimit2BoundingBox() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 880)) + .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) + ), + 1L + ); + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter(geoBoundingBoxQuery("location").setCorners(74.579421999999994, 143.5, -66.668903999999998, 113.96875)) + ), + 1L + ); // top == bottom && left == right - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter(geoBoundingBoxQuery("location").setCorners(18.036842, 59.328355000000002, 18.036842, 59.328355000000002)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter( - geoBoundingBoxQuery("location").setCorners( - 45.509526999999999, - -73.570986000000005, - 45.509526999999999, - -73.570986000000005 + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 880)) + .filter(geoBoundingBoxQuery("location").setCorners(18.036842, 59.328355000000002, 18.036842, 59.328355000000002)) + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter( + geoBoundingBoxQuery("location").setCorners( + 45.509526999999999, + -73.570986000000005, + 45.509526999999999, + -73.570986000000005 + ) ) - ) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + ), + 1L + ); // top == bottom - 
searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter(geoBoundingBoxQuery("location").setCorners(18.036842, 143.5, 18.036842, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter(geoBoundingBoxQuery("location").setCorners(45.509526999999999, 143.5, 45.509526999999999, 113.96875)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 880)) + .filter(geoBoundingBoxQuery("location").setCorners(18.036842, 143.5, 18.036842, 113.96875)) + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter(geoBoundingBoxQuery("location").setCorners(45.509526999999999, 143.5, 45.509526999999999, 113.96875)) + ), + 1L + ); // left == right - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter( - geoBoundingBoxQuery("location").setCorners( - 74.579421999999994, - 59.328355000000002, - -66.668903999999998, - 59.328355000000002 + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 880)) + .filter( + geoBoundingBoxQuery("location").setCorners( + 74.579421999999994, + 59.328355000000002, + -66.668903999999998, + 59.328355000000002 + ) ) - ) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter( - geoBoundingBoxQuery("location").setCorners( - 74.579421999999994, - -73.570986000000005, - -66.668903999999998, - -73.570986000000005 + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter( + geoBoundingBoxQuery("location").setCorners( + 74.579421999999994, + -73.570986000000005, + -66.668903999999998, + -73.570986000000005 + ) ) - ) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + ), + 1L + ); // Distance query - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 880)) - .filter(geoDistanceQuery("location").point(20, 60.0).distance(500, DistanceUnit.MILES)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - - searchResponse = client().prepareSearch() - .setQuery( - boolQuery().must(termQuery("userid", 534)) - .filter(geoDistanceQuery("location").point(45.0, -73.0).distance(500, DistanceUnit.MILES)) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 880)) + .filter(geoDistanceQuery("location").point(20, 60.0).distance(500, DistanceUnit.MILES)) + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + boolQuery().must(termQuery("userid", 534)) + .filter(geoDistanceQuery("location").point(45.0, -73.0).distance(500, DistanceUnit.MILES)) + ), + 1L + ); } public void testCompleteLonRange() throws Exception { @@ -336,60 +363,77 @@ public void testCompleteLonRange() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch() - 
.setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - searchResponse = client().prepareSearch() - .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180)), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, -180, -50, 180)), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180)), + 2L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, -180, -90, 180)), + 2L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360)), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(50, 0, -50, 360)), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360)), + 2L + ); + + assertHitCount( + client().prepareSearch() + .setQuery(geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE).setCorners(90, 0, -90, 360)), + 2L + ); // top == bottom - searchResponse = client().prepareSearch() - .setQuery( - 
geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE) - .setCorners(59.328355000000002, 0, 59.328355000000002, 360) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch() - .setQuery( - geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE) - .setCorners(59.328355000000002, -180, 59.328355000000002, 180) - ) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch() + .setQuery( + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE) + .setCorners(59.328355000000002, 0, 59.328355000000002, 360) + ), + 1L + ); + + assertHitCount( + client().prepareSearch() + .setQuery( + geoBoundingBoxQuery("location").setValidationMethod(GeoValidationMethod.COERCE) + .setCorners(59.328355000000002, -180, 59.328355000000002, 180) + ), + 1L + ); // Distance query - searchResponse = client().prepareSearch() - .setQuery(geoDistanceQuery("location").point(60.0, -20.0).distance(1800, DistanceUnit.MILES)) - .get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount( + client().prepareSearch().setQuery(geoDistanceQuery("location").point(60.0, -20.0).distance(1800, DistanceUnit.MILES)), + 1L + ); } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java index 09c134ad8ef96..29307f7f63ce9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeIntegTestCase.java @@ -57,28 +57,27 @@ public void testIndexPolygonDateLine() throws Exception { indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))).get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-179.75, 1))).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))).get(); + searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(90, 1))).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); - searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))).get(); + searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(-180, 1))).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))).get(); + searchResponse = prepareSearch("test").setQuery(geoShapeQuery("shape", new Point(180, 1))).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); } /** The testBulk method uses this only for Geo-specific tests */ protected void doDistanceAndBoundingBoxTest(String key) { assertHitCount( - client().prepareSearch().addStoredField("pin").setQuery(geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999)), + prepareSearch().addStoredField("pin").setQuery(geoBoundingBoxQuery("pin").setCorners(90, -179.99999, -90, 179.99999)), 53 ); - SearchResponse distance = client().prepareSearch() 
- .addStoredField("pin") + SearchResponse distance = prepareSearch().addStoredField("pin") .setQuery(geoDistanceQuery("pin").distance("425km").point(51.11, 9.851)) .get(); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 37ab420849804..b8f5cdf00da34 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -36,8 +36,7 @@ import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; public abstract class GeoShapeQueryTestCase extends BaseShapeQueryTestCase { @@ -160,11 +159,10 @@ public void testIndexRectangleSpanningDateLine() throws Exception { Point filterShape = new Point(179, 0); - SearchResponse result = client().prepareSearch(defaultIndexName) - .setQuery(queryBuilder().intersectionQuery(defaultFieldName, filterShape)) - .get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName).setQuery(queryBuilder().intersectionQuery(defaultFieldName, filterShape)), + 1 + ); } protected Line makeRandomLine() { diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index fc4b3ce93e11c..94c40b2d53b00 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -34,8 +34,10 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.FinalizeSnapshotContext; import org.elasticsearch.repositories.RepositoriesService; @@ -176,8 +178,12 @@ protected RepositoryData getRepositoryData(String repository) { return getRepositoryData((Repository) getRepositoryOnMaster(repository)); } - protected RepositoryData getRepositoryData(Repository repository) { - return PlainActionFuture.get(repository::getRepositoryData); + public static RepositoryData getRepositoryData(Repository repository) { + return PlainActionFuture.get( + listener -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, listener), + 10, + TimeUnit.SECONDS + ); } public static long getFailureCount(String repository) { @@ -360,7 +366,7 @@ protected void maybeInitWithOldSnapshotVersion(String repoName, Path repoPath) t initWithSnapshotVersion( repoName, repoPath, - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_8_9_0) + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, 
                     IndexVersions.V_8_9_0)
             );
         }
     }
    @@ -385,7 +391,7 @@ protected String initWithSnapshotVersion(String repoName, Path repoPath, IndexVe
             repositoryData.snapshotsToXContent(jsonBuilder, version);
             final var currentVersionString = Strings.toString(jsonBuilder);
             final String oldVersionString;
    -        if (version.onOrAfter(IndexVersion.FIRST_DETACHED_INDEX_VERSION)) {
    +        if (version.onOrAfter(IndexVersions.FIRST_DETACHED_INDEX_VERSION)) {
                 oldVersionString = currentVersionString.replace(
                     ",\"index_version\":" + IndexVersion.current(),
                     ",\"index_version\":" + version
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/DelegatingMeter.java b/test/framework/src/main/java/org/elasticsearch/telemetry/DelegatingMeter.java
    deleted file mode 100644
    index 25333c869dbf3..0000000000000
    --- a/test/framework/src/main/java/org/elasticsearch/telemetry/DelegatingMeter.java
    +++ /dev/null
    @@ -1,108 +0,0 @@
    -/*
    - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    - * or more contributor license agreements. Licensed under the Elastic License
    - * 2.0 and the Server Side Public License, v 1; you may not use this file except
    - * in compliance with, at your election, the Elastic License 2.0 or the Server
    - * Side Public License, v 1.
    - */
    -
    -package org.elasticsearch.telemetry;
    -
    -import org.elasticsearch.telemetry.metric.DoubleCounter;
    -import org.elasticsearch.telemetry.metric.DoubleGauge;
    -import org.elasticsearch.telemetry.metric.DoubleHistogram;
    -import org.elasticsearch.telemetry.metric.DoubleUpDownCounter;
    -import org.elasticsearch.telemetry.metric.LongCounter;
    -import org.elasticsearch.telemetry.metric.LongGauge;
    -import org.elasticsearch.telemetry.metric.LongHistogram;
    -import org.elasticsearch.telemetry.metric.LongUpDownCounter;
    -import org.elasticsearch.telemetry.metric.Meter;
    -
    -public class DelegatingMeter implements Meter {
    -
    -    private final Meter delegate;
    -
    -    public DelegatingMeter(Meter delegate) {
    -        this.delegate = delegate;
    -    }
    -
    -    @Override
    -    public DoubleCounter registerDoubleCounter(String name, String description, String unit) {
    -        return delegate.registerDoubleCounter(name, description, unit);
    -    }
    -
    -    @Override
    -    public DoubleCounter getDoubleCounter(String name) {
    -        return delegate.getDoubleCounter(name);
    -    }
    -
    -    @Override
    -    public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) {
    -        return delegate.registerDoubleUpDownCounter(name, description, unit);
    -    }
    -
    -    @Override
    -    public DoubleUpDownCounter getDoubleUpDownCounter(String name) {
    -        return delegate.getDoubleUpDownCounter(name);
    -    }
    -
    -    @Override
    -    public DoubleGauge registerDoubleGauge(String name, String description, String unit) {
    -        return delegate.registerDoubleGauge(name, description, unit);
    -    }
    -
    -    @Override
    -    public DoubleGauge getDoubleGauge(String name) {
    -        return delegate.getDoubleGauge(name);
    -    }
    -
    -    @Override
    -    public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) {
    -        return delegate.registerDoubleHistogram(name, description, unit);
    -    }
    -
    -    @Override
    -    public DoubleHistogram getDoubleHistogram(String name) {
    -        return delegate.getDoubleHistogram(name);
    -    }
    -
    -    @Override
    -    public LongCounter registerLongCounter(String name, String description, String unit) {
    -        return delegate.registerLongCounter(name, description, unit);
    -    }
    -
    -    @Override
    -    public LongCounter getLongCounter(String name) {
    -        return delegate.getLongCounter(name);
    -    }
    -
    -    @Override
    -    public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) {
    -        return delegate.registerLongUpDownCounter(name, description, unit);
    -    }
    -
    -    @Override
    -    public LongUpDownCounter getLongUpDownCounter(String name) {
    -        return delegate.getLongUpDownCounter(name);
    -    }
    -
    -    @Override
    -    public LongGauge registerLongGauge(String name, String description, String unit) {
    -        return delegate.registerLongGauge(name, description, unit);
    -    }
    -
    -    @Override
    -    public LongGauge getLongGauge(String name) {
    -        return delegate.getLongGauge(name);
    -    }
    -
    -    @Override
    -    public LongHistogram registerLongHistogram(String name, String description, String unit) {
    -        return delegate.registerLongHistogram(name, description, unit);
    -    }
    -
    -    @Override
    -    public LongHistogram getLongHistogram(String name) {
    -        return delegate.getLongHistogram(name);
    -    }
    -}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/InstrumentType.java b/test/framework/src/main/java/org/elasticsearch/telemetry/InstrumentType.java
    new file mode 100644
    index 0000000000000..3930adf1af638
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/InstrumentType.java
    @@ -0,0 +1,67 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import org.elasticsearch.telemetry.metric.DoubleCounter;
    +import org.elasticsearch.telemetry.metric.DoubleGauge;
    +import org.elasticsearch.telemetry.metric.DoubleHistogram;
    +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter;
    +import org.elasticsearch.telemetry.metric.Instrument;
    +import org.elasticsearch.telemetry.metric.LongCounter;
    +import org.elasticsearch.telemetry.metric.LongGauge;
    +import org.elasticsearch.telemetry.metric.LongHistogram;
    +import org.elasticsearch.telemetry.metric.LongUpDownCounter;
    +
    +import java.util.Objects;
    +
    +/**
    + * Enum with the different types for use as keys. This enum acts as a bridge between the Otel and Elasticsearch versions of each
    + * of the instruments.
    + */
    +public enum InstrumentType {
    +    DOUBLE_COUNTER(true),
    +    LONG_COUNTER(false),
    +    DOUBLE_UP_DOWN_COUNTER(true),
    +    LONG_UP_DOWN_COUNTER(false),
    +    DOUBLE_HISTOGRAM(true),
    +    LONG_HISTOGRAM(false),
    +    DOUBLE_GAUGE(true),
    +    LONG_GAUGE(false);
    +
    +    public final boolean isDouble;
    +    public final boolean isLong;
    +
    +    InstrumentType(boolean isDouble) {
    +        this.isDouble = isDouble;
    +        this.isLong = isDouble == false;
    +    }
    +
    +    public static InstrumentType fromInstrument(Instrument instrument) {
    +        Objects.requireNonNull(instrument);
    +        if (instrument instanceof DoubleCounter) {
    +            return InstrumentType.DOUBLE_COUNTER;
    +        } else if (instrument instanceof LongCounter) {
    +            return InstrumentType.LONG_COUNTER;
    +        } else if (instrument instanceof DoubleUpDownCounter) {
    +            return InstrumentType.DOUBLE_UP_DOWN_COUNTER;
    +        } else if (instrument instanceof LongUpDownCounter) {
    +            return InstrumentType.LONG_UP_DOWN_COUNTER;
    +        } else if (instrument instanceof DoubleHistogram) {
    +            return InstrumentType.DOUBLE_HISTOGRAM;
    +        } else if (instrument instanceof LongHistogram) {
    +            return InstrumentType.LONG_HISTOGRAM;
    +        } else if (instrument instanceof DoubleGauge) {
    +            return InstrumentType.DOUBLE_GAUGE;
    +        } else if (instrument instanceof LongGauge) {
    +            return InstrumentType.LONG_GAUGE;
    +        } else {
    +            throw new IllegalArgumentException("unknown instrument [" + instrument.getClass().getName() + "]");
    +        }
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/Measurement.java b/test/framework/src/main/java/org/elasticsearch/telemetry/Measurement.java
    new file mode 100644
    index 0000000000000..76f94f54fbad9
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/Measurement.java
    @@ -0,0 +1,64 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Objects;
    +import java.util.stream.Collectors;
    +
    +/**
    + * A single measurement from an {@link org.elasticsearch.telemetry.metric.Instrument}.
    + */
    +public record Measurement(Number value, Map<String, Object> attributes, boolean isDouble) {
    +    public Measurement {
    +        Objects.requireNonNull(value);
    +    }
    +
    +    public boolean isLong() {
    +        return isDouble == false;
    +    }
    +
    +    public double getDouble() {
    +        assert isDouble;
    +        return value.doubleValue();
    +    }
    +
    +    public long getLong() {
    +        assert isLong();
    +        return value.longValue();
    +    }
    +
    +    /**
    +     * Add measurements with the same attributes together. All measurements must be from the
    +     * same instrument. @throws IllegalArgumentException if some measurements differ on {@link #isDouble}.
    +     */
    +    public static List<Measurement> combine(List<Measurement> measurements) {
    +        if (measurements == null || measurements.isEmpty()) {
    +            return Collections.emptyList();
    +        }
    +        boolean isDouble = measurements.get(0).isDouble;
    +        Map<Map<String, Object>, Number> byAttr = new HashMap<>();
    +        measurements.forEach(m -> {
    +            if (m.isDouble != isDouble) {
    +                throw new IllegalArgumentException("cannot combine measurements of different types");
    +            }
    +            byAttr.compute(
    +                m.attributes,
    +                (k, v) -> (v == null) ? m.value : isDouble ? v.doubleValue() + m.getDouble() : v.longValue() + m.getLong()
    +            );
    +        });
    +        return byAttr.entrySet()
    +            .stream()
    +            .map(entry -> new Measurement(entry.getValue(), entry.getKey(), isDouble))
    +            .collect(Collectors.toList());
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java b/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java
    new file mode 100644
    index 0000000000000..29218783a45b7
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/MetricRecorder.java
    @@ -0,0 +1,122 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import org.elasticsearch.core.Strings;
    +import org.elasticsearch.telemetry.metric.Instrument;
    +
    +import java.util.ArrayList;
    +import java.util.Collections;
    +import java.util.HashMap;
    +import java.util.List;
    +import java.util.Map;
    +import java.util.Objects;
    +
    +/**
    + * Container for registered Instruments (either {@link Instrument} or Otel's versions).
    + * Records invocations of the Instruments as {@link Measurement}s.
    + * @param <I> The supertype of the registered instrument.
    + */
    +public class MetricRecorder<I> {
    +
    +    /**
    +     * Container for Instrument of a given type, such as DoubleGauge, LongHistogram, etc.
    +     * @param registered - registration records for each named metric
    +     * @param called - one {@link Measurement} per invocation of the instrument
    +     * @param instruments - the instrument instance
    +     */
    +    private record RegisteredMetric<I>(
    +        Map<String, Registration> registered,
    +        Map<String, List<Measurement>> called,
    +        Map<String, I> instruments,
    +        List<Runnable> callbacks
    +    ) {
    +        void register(String name, String description, String unit, I instrument) {
    +            assert registered.containsKey(name) == false
    +                : Strings.format("unexpected [%s]: [%s][%s], already registered[%s]", name, description, unit, registered.get(name));
    +            registered.put(name, new Registration(name, description, unit));
    +            instruments.put(name, instrument);
    +            if (instrument instanceof Runnable callback) {
    +                callbacks.add(callback);
    +            }
    +        }
    +
    +        void call(String name, Measurement call) {
    +            assert registered.containsKey(name) : Strings.format("call for unregistered metric [%s]: [%s]", name, call);
    +            called.computeIfAbsent(Objects.requireNonNull(name), k -> new ArrayList<>()).add(call);
    +        }
    +
    +    }
    +
    +    /**
    +     * The containers for each metric type.
    +     */
    +    private final Map<InstrumentType, RegisteredMetric<I>> metrics;
    +
    +    public MetricRecorder() {
    +        metrics = new HashMap<>(InstrumentType.values().length);
    +        for (var instrument : InstrumentType.values()) {
    +            metrics.put(instrument, new RegisteredMetric<>(new HashMap<>(), new HashMap<>(), new HashMap<>(), new ArrayList<>()));
    +        }
    +    }
    +
    +    /**
    +     * Register an instrument. Instruments must be registered before they are used.
    +     */
    +    public void register(I instrument, InstrumentType instrumentType, String name, String description, String unit) {
    +        metrics.get(instrumentType).register(name, description, unit, instrument);
    +    }
    +
    +    /**
    +     * Record a call made to a registered Elasticsearch {@link Instrument}.
    +     */
    +    public void call(Instrument instrument, Number value, Map<String, Object> attributes) {
    +        call(InstrumentType.fromInstrument(instrument), instrument.getName(), value, attributes);
    +    }
    +
    +    /**
    +     * Record a call made to the registered instrument represented by the {@link InstrumentType} enum.
    +     */
    +    public void call(InstrumentType instrumentType, String name, Number value, Map<String, Object> attributes) {
    +        metrics.get(instrumentType).call(name, new Measurement(value, attributes, instrumentType.isDouble));
    +    }
    +
    +    /**
    +     * Get the {@link Measurement}s for each call of the given registered Elasticsearch {@link Instrument}.
    +     */
    +    public List<Measurement> getMeasurements(Instrument instrument) {
    +        return getMeasurements(InstrumentType.fromInstrument(instrument), instrument.getName());
    +    }
    +
    +    public List<Measurement> getMeasurements(InstrumentType instrumentType, String name) {
    +        return metrics.get(instrumentType).called.getOrDefault(Objects.requireNonNull(name), Collections.emptyList());
    +    }
    +
    +    /**
    +     * Get the {@link Registration} for a given Elasticsearch {@link Instrument}.
    +     */
    +    public Registration getRegistration(Instrument instrument) {
    +        return metrics.get(InstrumentType.fromInstrument(instrument)).registered().get(instrument.getName());
    +    }
    +
    +    /**
    +     * Fetch the instrument instance given the type and registered name.
    +     */
    +    public I getInstrument(InstrumentType instrumentType, String name) {
    +        return metrics.get(instrumentType).instruments.get(name);
    +    }
    +
    +    public void resetCalls() {
    +        metrics.forEach((it, rm) -> rm.called().clear());
    +    }
    +
    +    public void collect() {
    +        metrics.forEach((it, rm) -> rm.callbacks().forEach(Runnable::run));
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingInstruments.java b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingInstruments.java
    new file mode 100644
    index 0000000000000..7067c390ef5ae
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingInstruments.java
    @@ -0,0 +1,212 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import org.elasticsearch.common.util.concurrent.ReleasableLock;
    +import org.elasticsearch.core.Tuple;
    +import org.elasticsearch.telemetry.metric.DoubleCounter;
    +import org.elasticsearch.telemetry.metric.DoubleGauge;
    +import org.elasticsearch.telemetry.metric.DoubleHistogram;
    +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter;
    +import org.elasticsearch.telemetry.metric.DoubleWithAttributes;
    +import org.elasticsearch.telemetry.metric.Instrument;
    +import org.elasticsearch.telemetry.metric.LongCounter;
    +import org.elasticsearch.telemetry.metric.LongGauge;
    +import org.elasticsearch.telemetry.metric.LongHistogram;
    +import org.elasticsearch.telemetry.metric.LongUpDownCounter;
    +import org.elasticsearch.telemetry.metric.LongWithAttributes;
    +
    +import java.util.Collections;
    +import java.util.Map;
    +import java.util.Objects;
    +import java.util.concurrent.locks.ReentrantLock;
    +import java.util.function.Supplier;
    +
    +/**
    + * Recording versions of Elasticsearch {@link Instrument}s. All invocations are recorded via {@link MetricRecorder}.
    + */
    +public class RecordingInstruments {
    +    protected abstract static class RecordingInstrument implements Instrument {
    +        protected final String name;
    +        private final MetricRecorder<Instrument> recorder;
    +
    +        public RecordingInstrument(String name, MetricRecorder<Instrument> recorder) {
    +            this.name = Objects.requireNonNull(name);
    +            this.recorder = Objects.requireNonNull(recorder);
    +        }
    +
    +        protected void call(Number value, Map<String, Object> attributes) {
    +            recorder.call(this, value, attributes);
    +        }
    +
    +        @Override
    +        public String getName() {
    +            return name;
    +        }
    +    }
    +
    +    protected interface NumberWithAttributesObserver extends Supplier<Tuple<Number, Map<String, Object>>> {
    +
    +    }
    +
    +    protected abstract static class CallbackRecordingInstrument extends RecordingInstrument implements AutoCloseable, Runnable {
    +        private final NumberWithAttributesObserver observer;
    +        private boolean closed = false;
    +        private final ReleasableLock closedLock = new ReleasableLock(new ReentrantLock());
    +
    +        public CallbackRecordingInstrument(String name, NumberWithAttributesObserver observer, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +            this.observer = observer;
    +        }
    +
    +        @Override
    +        public void run() {
    +            try (ReleasableLock lock = closedLock.acquire()) {
    +                if (closed) {
    +                    return;
    +                }
    +                var observation = observer.get();
    +                call(observation.v1(), observation.v2());
    +            }
    +        }
    +
    +        @Override
    +        public void close() throws Exception {
    +            try (ReleasableLock lock = closedLock.acquire()) {
    +                assert closed == false : "double close";
    +                closed = true;
    +            }
    +        }
    +    }
    +
    +    public static class RecordingDoubleCounter extends RecordingInstrument implements DoubleCounter {
    +        public RecordingDoubleCounter(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void increment() {
    +            incrementBy(1.0, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void incrementBy(double inc) {
    +            incrementBy(inc, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void incrementBy(double inc, Map<String, Object> attributes) {
    +            call(inc, attributes);
    +        }
    +    }
    +
    +    public static class RecordingDoubleGauge extends CallbackRecordingInstrument implements DoubleGauge {
    +        public RecordingDoubleGauge(String name, Supplier<DoubleWithAttributes> observer, MetricRecorder<Instrument> recorder) {
    +            super(name, () -> {
    +                var observation = observer.get();
    +                return new Tuple<>(observation.value(), observation.attributes());
    +            }, recorder);
    +        }
    +    }
    +
    +    public static class RecordingDoubleHistogram extends RecordingInstrument implements DoubleHistogram {
    +        public RecordingDoubleHistogram(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void record(double value) {
    +            record(value, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void record(double value, Map<String, Object> attributes) {
    +            call(value, attributes);
    +        }
    +    }
    +
    +    public static class RecordingDoubleUpDownCounter extends RecordingInstrument implements DoubleUpDownCounter {
    +        public RecordingDoubleUpDownCounter(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void add(double inc) {
    +            add(inc, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void add(double inc, Map<String, Object> attributes) {
    +            call(inc, attributes);
    +        }
    +    }
    +
    +    public static class RecordingLongCounter extends RecordingInstrument implements LongCounter {
    +        public RecordingLongCounter(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void increment() {
    +            incrementBy(1L, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void incrementBy(long inc) {
    +            incrementBy(inc, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void incrementBy(long inc, Map<String, Object> attributes) {
    +            call(inc, attributes);
    +        }
    +    }
    +
    +    public static class RecordingLongGauge extends CallbackRecordingInstrument implements LongGauge {
    +
    +        public RecordingLongGauge(String name, Supplier<LongWithAttributes> observer, MetricRecorder<Instrument> recorder) {
    +            super(name, () -> {
    +                var observation = observer.get();
    +                return new Tuple<>(observation.value(), observation.attributes());
    +            }, recorder);
    +        }
    +    }
    +
    +    public static class RecordingLongHistogram extends RecordingInstrument implements LongHistogram {
    +        public RecordingLongHistogram(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void record(long value) {
    +            record(value, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void record(long value, Map<String, Object> attributes) {
    +            call(value, attributes);
    +        }
    +    }
    +
    +    public static class RecordingLongUpDownCounter extends RecordingInstrument implements LongUpDownCounter {
    +        public RecordingLongUpDownCounter(String name, MetricRecorder<Instrument> recorder) {
    +            super(name, recorder);
    +        }
    +
    +        @Override
    +        public void add(long inc) {
    +            add(inc, Collections.emptyMap());
    +        }
    +
    +        @Override
    +        public void add(long inc, Map<String, Object> attributes) {
    +            call(inc, attributes);
    +        }
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java
    new file mode 100644
    index 0000000000000..f552b2d001b42
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/RecordingMeterRegistry.java
    @@ -0,0 +1,165 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import org.elasticsearch.telemetry.metric.DoubleCounter;
    +import org.elasticsearch.telemetry.metric.DoubleGauge;
    +import org.elasticsearch.telemetry.metric.DoubleHistogram;
    +import org.elasticsearch.telemetry.metric.DoubleUpDownCounter;
    +import org.elasticsearch.telemetry.metric.DoubleWithAttributes;
    +import org.elasticsearch.telemetry.metric.Instrument;
    +import org.elasticsearch.telemetry.metric.LongCounter;
    +import org.elasticsearch.telemetry.metric.LongGauge;
    +import org.elasticsearch.telemetry.metric.LongHistogram;
    +import org.elasticsearch.telemetry.metric.LongUpDownCounter;
    +import org.elasticsearch.telemetry.metric.LongWithAttributes;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
    +
    +import java.util.function.Supplier;
    +
    +/**
    + * A {@link MeterRegistry} that records all instrument invocations.
    + * Tests can subclass this class and extend the build[Instrument] methods to do their
    + * own validations at instrument registration time and/or provide their own instruments.
    + */
    +public class RecordingMeterRegistry implements MeterRegistry {
    +    protected final MetricRecorder<Instrument> recorder = new MetricRecorder<>();
    +
    +    MetricRecorder<Instrument> getRecorder() {
    +        return recorder;
    +    }
    +
    +    @Override
    +    public DoubleCounter registerDoubleCounter(String name, String description, String unit) {
    +        DoubleCounter instrument = buildDoubleCounter(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public DoubleCounter getDoubleCounter(String name) {
    +        return (DoubleCounter) recorder.getInstrument(InstrumentType.DOUBLE_COUNTER, name);
    +    }
    +
    +    protected DoubleCounter buildDoubleCounter(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingDoubleCounter(name, recorder);
    +    }
    +
    +    @Override
    +    public DoubleUpDownCounter registerDoubleUpDownCounter(String name, String description, String unit) {
    +        DoubleUpDownCounter instrument = buildDoubleUpDownCounter(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public DoubleUpDownCounter getDoubleUpDownCounter(String name) {
    +        return (DoubleUpDownCounter) recorder.getInstrument(InstrumentType.DOUBLE_UP_DOWN_COUNTER, name);
    +    }
    +
    +    protected DoubleUpDownCounter buildDoubleUpDownCounter(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingDoubleUpDownCounter(name, recorder);
    +    }
    +
    +    @Override
    +    public DoubleGauge registerDoubleGauge(String name, String description, String unit, Supplier<DoubleWithAttributes> observer) {
    +        DoubleGauge instrument = buildDoubleGauge(name, description, unit, observer);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public DoubleGauge getDoubleGauge(String name) {
    +        return (DoubleGauge) recorder.getInstrument(InstrumentType.DOUBLE_GAUGE, name);
    +    }
    +
    +    protected DoubleGauge buildDoubleGauge(String name, String description, String unit, Supplier<DoubleWithAttributes> observer) {
    +        return new RecordingInstruments.RecordingDoubleGauge(name, observer, recorder);
    +    }
    +
    +    @Override
    +    public DoubleHistogram registerDoubleHistogram(String name, String description, String unit) {
    +        DoubleHistogram instrument = buildDoubleHistogram(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public DoubleHistogram getDoubleHistogram(String name) {
    +        return (DoubleHistogram) recorder.getInstrument(InstrumentType.DOUBLE_HISTOGRAM, name);
    +    }
    +
    +    protected DoubleHistogram buildDoubleHistogram(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingDoubleHistogram(name, recorder);
    +    }
    +
    +    @Override
    +    public LongCounter registerLongCounter(String name, String description, String unit) {
    +        LongCounter instrument = buildLongCounter(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public LongCounter getLongCounter(String name) {
    +        return (LongCounter) recorder.getInstrument(InstrumentType.LONG_COUNTER, name);
    +    }
    +
    +    protected LongCounter buildLongCounter(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingLongCounter(name, recorder);
    +    }
    +
    +    @Override
    +    public LongUpDownCounter registerLongUpDownCounter(String name, String description, String unit) {
    +        LongUpDownCounter instrument = buildLongUpDownCounter(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public LongUpDownCounter getLongUpDownCounter(String name) {
    +        return (LongUpDownCounter) recorder.getInstrument(InstrumentType.LONG_UP_DOWN_COUNTER, name);
    +    }
    +
    +    protected LongUpDownCounter buildLongUpDownCounter(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingLongUpDownCounter(name, recorder);
    +    }
    +
    +    @Override
    +    public LongGauge registerLongGauge(String name, String description, String unit, Supplier<LongWithAttributes> observer) {
    +        LongGauge instrument = buildLongGauge(name, description, unit, observer);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public LongGauge getLongGauge(String name) {
    +        return (LongGauge) recorder.getInstrument(InstrumentType.LONG_GAUGE, name);
    +    }
    +
    +    protected LongGauge buildLongGauge(String name, String description, String unit, Supplier<LongWithAttributes> observer) {
    +        return new RecordingInstruments.RecordingLongGauge(name, observer, recorder);
    +    }
    +
    +    @Override
    +    public LongHistogram registerLongHistogram(String name, String description, String unit) {
    +        LongHistogram instrument = buildLongHistogram(name, description, unit);
    +        recorder.register(instrument, InstrumentType.fromInstrument(instrument), name, description, unit);
    +        return instrument;
    +    }
    +
    +    @Override
    +    public LongHistogram getLongHistogram(String name) {
    +        return (LongHistogram) recorder.getInstrument(InstrumentType.LONG_HISTOGRAM, name);
    +    }
    +
    +    protected LongHistogram buildLongHistogram(String name, String description, String unit) {
    +        return new RecordingInstruments.RecordingLongHistogram(name, recorder);
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/Registration.java b/test/framework/src/main/java/org/elasticsearch/telemetry/Registration.java
    new file mode 100644
    index 0000000000000..8a54d9b1476a4
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/Registration.java
    @@ -0,0 +1,22 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import java.util.Objects;
    +
    +/**
    + * A record of the arguments for a registered instrument.
    + */
    +record Registration(String name, String description, String unit) {
    +    Registration {
    +        Objects.requireNonNull(name);
    +        Objects.requireNonNull(description);
    +        Objects.requireNonNull(unit);
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java
    new file mode 100644
    index 0000000000000..53aef542f0d1a
    --- /dev/null
    +++ b/test/framework/src/main/java/org/elasticsearch/telemetry/TestTelemetryPlugin.java
    @@ -0,0 +1,82 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.telemetry;
    +
    +import org.elasticsearch.common.settings.Settings;
    +import org.elasticsearch.plugins.Plugin;
    +import org.elasticsearch.plugins.TelemetryPlugin;
    +import org.elasticsearch.telemetry.metric.Instrument;
    +import org.elasticsearch.telemetry.metric.MeterRegistry;
    +import org.elasticsearch.telemetry.tracing.Tracer;
    +
    +import java.util.List;
    +
    +/**
    + * TelemetryPlugin that uses RecordingMeterRegistry to record meter calls
    + * and exposes measurement getters.
    + */
    +public class TestTelemetryPlugin extends Plugin implements TelemetryPlugin {
    +
    +    protected final RecordingMeterRegistry meter = new RecordingMeterRegistry();
    +
    +    Registration getRegistration(Instrument instrument) {
    +        return meter.getRecorder().getRegistration(instrument);
    +    }
    +
    +    public List<Measurement> getMetrics(Instrument instrument) {
    +        return meter.getRecorder().getMeasurements(instrument);
    +    }
    +
    +    public List<Measurement> getDoubleCounterMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.DOUBLE_COUNTER, name);
    +    }
    +
    +    public List<Measurement> getLongCounterMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.LONG_COUNTER, name);
    +    }
    +
    +    public List<Measurement> getDoubleUpDownCounterMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.DOUBLE_UP_DOWN_COUNTER, name);
    +    }
    +
    +    public List<Measurement> getLongUpDownCounterMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.LONG_UP_DOWN_COUNTER, name);
    +    }
    +
    +    public List<Measurement> getDoubleGaugeMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.DOUBLE_GAUGE, name);
    +    }
    +
    +    public List<Measurement> getLongGaugeMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.LONG_GAUGE, name);
    +    }
    +
    +    public List<Measurement> getDoubleHistogramMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.DOUBLE_HISTOGRAM, name);
    +    }
    +
    +    public List<Measurement> getLongHistogramMeasurement(String name) {
    +        return meter.getRecorder().getMeasurements(InstrumentType.LONG_HISTOGRAM, name);
    +    }
    +
    +    @Override
    +    public TelemetryProvider getTelemetryProvider(Settings settings) {
    +        return new TelemetryProvider() {
    +            @Override
    +            public Tracer getTracer() {
    +                return Tracer.NOOP;
    +            }
    +
    +            @Override
    +            public MeterRegistry getMeterRegistry() {
    +                return meter;
    +            }
    +        };
    +    }
    +}
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractChunkedSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractChunkedSerializingTestCase.java
    index 4e26869b492b5..bf00efe4d0374 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractChunkedSerializingTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractChunkedSerializingTestCase.java
    @@ -66,7 +66,7 @@ public static void assertChunkCount(
                     builder.endObject();
                 }
             } catch (IOException e) {
    -            throw new AssertionError("unexpected", e);
    +            fail(e);
             }
             // closing the builder verifies that the XContent is well-formed
             assertEquals(expectedChunkCount.applyAsInt(instance), chunkCount);
         }
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java
    index 4325ba46f5e86..c4eb63f440905 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSearchCancellationTestCase.java
    @@ -281,7 +281,9 @@ public void onIndexModule(IndexModule indexModule) {
             indexModule.addSearchOperationListener(new SearchOperationListener() {
                 @Override
                 public void onNewReaderContext(ReaderContext c) {
    -                runOnNewReaderContext.get().accept(c);
    +                if (runOnNewReaderContext.get() != null) {
    +                    runOnNewReaderContext.get().accept(c);
    +                }
                 }
             });
         }
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/BuildUtils.java b/test/framework/src/main/java/org/elasticsearch/test/BuildUtils.java
    index 8b846a0488ab1..8823d725a6506 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/BuildUtils.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/BuildUtils.java
    @@ -100,6 +100,7 @@ public static Build mutateBuild(Build existing) {
         }
     
         private static String randomStringExcept(final String s) {
    -        return randomAlphaOfLength(13 - s.length());
    +        int len = s == null ? 0 : s.length();
    +        return randomAlphaOfLength(13 - len);
         }
     }
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
    index a557841ba34bd..832902f52deb2 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java
    @@ -21,7 +21,6 @@
     import org.apache.lucene.search.TotalHits;
     import org.apache.lucene.tests.util.LuceneTestCase;
     import org.elasticsearch.ExceptionsHelper;
    -import org.elasticsearch.action.ActionFuture;
     import org.elasticsearch.action.ActionListener;
     import org.elasticsearch.action.DocWriteResponse;
     import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse;
    @@ -49,6 +48,7 @@
     import org.elasticsearch.action.index.IndexRequestBuilder;
     import org.elasticsearch.action.search.ClearScrollResponse;
     import org.elasticsearch.action.search.SearchRequest;
    +import org.elasticsearch.action.search.SearchRequestBuilder;
     import org.elasticsearch.action.search.SearchResponse;
     import org.elasticsearch.action.search.TransportSearchAction;
     import org.elasticsearch.action.support.DefaultShardOperationFailedException;
    @@ -1064,8 +1064,7 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr
                 if (lastKnownCount >= numDocs) {
                     try {
    -                    long count = client().prepareSearch()
    -                        .setTrackTotalHits(true)
    +                    long count = prepareSearch().setTrackTotalHits(true)
                             .setSize(0)
                             .setQuery(matchAllQuery())
                             .get()
    @@ -1095,6 +1094,10 @@ public void waitForDocs(final long numDocs, final BackgroundIndexer indexer) thr
             }, maxWaitTimeMs, TimeUnit.MILLISECONDS);
         }
     
    +    public static SearchRequestBuilder prepareSearch(String... indices) {
    +        return client().prepareSearch(indices);
    +    }
    +
         /**
          * Retrieves the persistent tasks with the requested task name from the given cluster state.
          */
    @@ -1406,10 +1409,6 @@ protected final DocWriteResponse index(String index, String id, Map<String, Object> source) {
             return client().prepareIndex(index).setId(id).setSource(source).get();
         }
     
    -    protected final ActionFuture<DocWriteResponse> startIndex(String index, String id, BytesReference source, XContentType type) {
    -        return client().prepareIndex(index).setId(id).setSource(source, type).execute();
    -    }
    -
         /**
          * Syntactic sugar for:
          * <pre>
    @@ -2076,8 +2075,9 @@ private NodeConfigurationSource getNodeConfigSource() {
             }
             boolean enableConcurrentSearch = enableConcurrentSearch();
             if (enableConcurrentSearch) {
    -            initialNodeSettings.put(SearchService.QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), true);
                 initialNodeSettings.put(SearchService.MINIMUM_DOCS_PER_SLICE.getKey(), 1);
    +        } else {
    +            initialNodeSettings.put(SearchService.QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), false);
             }
             return new NodeConfigurationSource() {
                 @Override
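The telemetry classes added above replace the deleted DelegatingMeter with a record-and-assert workflow: production code writes through the regular MeterRegistry interface while the test reads the recorded Measurements back. A minimal sketch of that flow, grounded in the APIs this patch adds; the metric name is hypothetical, and a real test would install the plugin via nodePlugins() rather than constructing it directly:

        import java.util.List;

        import org.elasticsearch.common.settings.Settings;
        import org.elasticsearch.telemetry.Measurement;
        import org.elasticsearch.telemetry.TestTelemetryPlugin;
        import org.elasticsearch.telemetry.metric.LongCounter;
        import org.elasticsearch.telemetry.metric.MeterRegistry;

        public class TelemetryRecordingSketch {
            public static void main(String[] args) {
                TestTelemetryPlugin plugin = new TestTelemetryPlugin();
                MeterRegistry registry = plugin.getTelemetryProvider(Settings.EMPTY).getMeterRegistry();

                // Production code registers and increments through the regular interface...
                LongCounter counter = registry.registerLongCounter("es.example.ops.total", "example ops", "count");
                counter.incrementBy(2L);
                counter.increment();

                // ...and the test reads the recorded calls back; combine() sums per attribute set.
                List<Measurement> calls = plugin.getLongCounterMeasurement("es.example.ops.total");
                assert Measurement.combine(calls).get(0).getLong() == 3L;
            }
        }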
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
    index 76ed45e2bbbe5..b5ac94b53d3ca 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
    @@ -133,7 +133,7 @@ public void tearDown() throws Exception {
             var deleteDataStreamsRequest = new DeleteDataStreamAction.Request("*");
             deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN);
             try {
    -            assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest).actionGet());
    +            assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest));
             } catch (IllegalStateException e) {
                 // Ignore if action isn't registered, because data streams is a module and
                 // if the delete action isn't registered then there no data streams to delete.
    @@ -254,8 +254,9 @@ private Node newNode() {
     
             boolean enableConcurrentSearch = enableConcurrentSearch();
             if (enableConcurrentSearch) {
    -            settingBuilder.put(SearchService.QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), true)
    -                .put(SearchService.MINIMUM_DOCS_PER_SLICE.getKey(), 1);
    +            settingBuilder.put(SearchService.MINIMUM_DOCS_PER_SLICE.getKey(), 1);
    +        } else {
    +            settingBuilder.put(SearchService.QUERY_PHASE_PARALLEL_COLLECTION_ENABLED.getKey(), false);
             }
             Settings settings = settingBuilder.build();
     
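Both base classes flip the concurrent-search default the same way: suites that keep concurrent search get MINIMUM_DOCS_PER_SLICE forced to 1 and inherit parallel collection from the server default, while opting out now sets QUERY_PHASE_PARALLEL_COLLECTION_ENABLED to false explicitly. A sketch of an opt-out suite, assuming enableConcurrentSearch() is the overridable hook these hunks call:

        public class SerialCollectionTests extends ESSingleNodeTestCase {
            @Override
            protected boolean enableConcurrentSearch() {
                // The base class now disables query-phase parallel collection for us;
                // before this change, tests had to opt in to enable it instead.
                return false;
            }
        }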
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
    index c7e74bde890f8..562777e995fdc 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
    @@ -2013,9 +2013,9 @@ public static void safeAwait(CyclicBarrier barrier) {
                 barrier.await(10, TimeUnit.SECONDS);
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
    -            throw new AssertionError("unexpected", e);
    +            fail(e);
             } catch (Exception e) {
    -            throw new AssertionError("unexpected", e);
    +            fail(e);
             }
         }
     
    @@ -2024,7 +2024,7 @@ public static void safeAwait(CountDownLatch countDownLatch) {
                 assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
    -            throw new AssertionError("unexpected", e);
    +            fail(e);
             }
         }
     
    @@ -2033,7 +2033,7 @@ public static void safeSleep(long millis) {
                 Thread.sleep(millis);
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
    -            throw new AssertionError("unexpected", e);
    +            fail(e);
             }
         }
     
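The repeated `throw new AssertionError("unexpected", e)` blocks here and below collapse into `fail(e)`. The helper itself is not shown in this patch; a sketch of the behavior the call sites rely on (signature assumed):

        // Assumed ESTestCase helper: rethrow any unexpected throwable as an
        // assertion failure, preserving the original exception as the cause.
        public static <T> T fail(Throwable t) {
            throw new AssertionError("unexpected", t);
        }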
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
    index b666226698570..bd0f2c88c2219 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java
    @@ -1912,7 +1912,7 @@ private Set<String> excludeMasters(Collection<NodeAndClient> nodeAndClients) {
                             new AddVotingConfigExclusionsRequest(excludedNodeNames.toArray(Strings.EMPTY_ARRAY))
                         ).get();
                     } catch (InterruptedException | ExecutionException e) {
    -                    throw new AssertionError("unexpected", e);
    +                    ESTestCase.fail(e);
                     }
                 }
             }
    @@ -1927,7 +1927,7 @@ private void removeExclusions(Set<String> excludedNodeIds) {
                     Client client = getRandomNodeAndClient(node -> excludedNodeIds.contains(node.name) == false).client();
                     client.execute(ClearVotingConfigExclusionsAction.INSTANCE, new ClearVotingConfigExclusionsRequest()).get();
                 } catch (InterruptedException | ExecutionException e) {
    -                throw new AssertionError("unexpected", e);
    +                ESTestCase.fail(e);
                 }
             }
         }
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
    index 9b9f464d8dff3..d504bebaa33e7 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/MockIndexEventListener.java
    @@ -7,18 +7,11 @@
      */
     package org.elasticsearch.test;
     
    -import org.elasticsearch.client.internal.Client;
    -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
     import org.elasticsearch.cluster.routing.ShardRouting;
    -import org.elasticsearch.cluster.routing.allocation.AllocationService;
    -import org.elasticsearch.cluster.service.ClusterService;
    -import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
     import org.elasticsearch.common.settings.Setting;
     import org.elasticsearch.common.settings.Setting.Property;
     import org.elasticsearch.common.settings.Settings;
     import org.elasticsearch.core.Nullable;
    -import org.elasticsearch.env.Environment;
    -import org.elasticsearch.env.NodeEnvironment;
     import org.elasticsearch.index.Index;
     import org.elasticsearch.index.IndexModule;
     import org.elasticsearch.index.IndexService;
    @@ -27,21 +20,13 @@
     import org.elasticsearch.index.shard.IndexShard;
     import org.elasticsearch.index.shard.IndexShardState;
     import org.elasticsearch.index.shard.ShardId;
    -import org.elasticsearch.indices.IndicesService;
     import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason;
     import org.elasticsearch.plugins.Plugin;
    -import org.elasticsearch.repositories.RepositoriesService;
    -import org.elasticsearch.script.ScriptService;
    -import org.elasticsearch.telemetry.TelemetryProvider;
    -import org.elasticsearch.threadpool.ThreadPool;
    -import org.elasticsearch.watcher.ResourceWatcherService;
    -import org.elasticsearch.xcontent.NamedXContentRegistry;
     
     import java.util.Arrays;
     import java.util.Collection;
     import java.util.Collections;
     import java.util.List;
    -import java.util.function.Supplier;
     
     /**
      * This is a testing plugin that registers a generic
    @@ -76,22 +61,7 @@ public void onIndexModule(IndexModule module) {
             }
     
             @Override
    -        public Collection<Object> createComponents(
    -            Client client,
    -            ClusterService clusterService,
    -            ThreadPool threadPool,
    -            ResourceWatcherService resourceWatcherService,
    -            ScriptService scriptService,
    -            NamedXContentRegistry xContentRegistry,
    -            Environment environment,
    -            NodeEnvironment nodeEnvironment,
    -            NamedWriteableRegistry namedWriteableRegistry,
    -            IndexNameExpressionResolver expressionResolver,
    -            Supplier<RepositoriesService> repositoriesServiceSupplier,
    -            TelemetryProvider telemetryProvider,
    -            AllocationService allocationService,
    -            IndicesService indicesService
    -        ) {
    +        public Collection<Object> createComponents(PluginServices services) {
                 return Collections.singletonList(listener);
             }
         }
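The fourteen-argument createComponents override shrinks to the consolidated PluginServices form; plugins pull only what they need from the holder. A hedged sketch of the new shape (the accessor name is assumed, not shown in this patch):

        @Override
        public Collection<Object> createComponents(PluginServices services) {
            // Dependencies are now fetched from the services holder on demand,
            // e.g. services.clusterService(), instead of being injected positionally.
            return Collections.singletonList(listener);
        }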
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java b/test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java
    index f3a5fdf836199..b44ae52c08511 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/ReachabilityChecker.java
    @@ -116,7 +116,7 @@ public void assertReferenceEnqueuedForCollection(MemoryMXBean memoryMXBean, long
                         assertNull(phantomReference.get()); // always succeeds, we're just doing this to use the phantomReference for something
                     }
                 } catch (Exception e) {
    -                throw new AssertionError("unexpected", e);
    +                ESTestCase.fail(e);
                 }
             }
     
    @@ -128,7 +128,7 @@ public void assertReferenceNotEnqueuedForCollection(MemoryMXBean memoryMXBean) {
                     memoryMXBean.gc();
                     assertNull("became unreachable: " + description, referenceQueue.remove(100));
                 } catch (Exception e) {
    -                throw new AssertionError("unexpected", e);
    +                ESTestCase.fail(e);
                 }
             }
         }
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
    index 7868043ac61b3..f355b736ec002 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java
    @@ -26,13 +26,13 @@
     import org.elasticsearch.action.search.ShardSearchFailure;
     import org.elasticsearch.action.support.DefaultShardOperationFailedException;
     import org.elasticsearch.action.support.broadcast.BaseBroadcastResponse;
    -import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
     import org.elasticsearch.action.support.master.IsAcknowledgedSupplier;
     import org.elasticsearch.cluster.block.ClusterBlock;
     import org.elasticsearch.cluster.block.ClusterBlockException;
     import org.elasticsearch.cluster.metadata.IndexMetadata;
     import org.elasticsearch.cluster.metadata.IndexTemplateMetadata;
     import org.elasticsearch.common.bytes.BytesReference;
    +import org.elasticsearch.core.CheckedConsumer;
     import org.elasticsearch.core.Nullable;
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.rest.RestStatus;
    @@ -56,7 +56,9 @@
     import java.util.Map;
     import java.util.Set;
     import java.util.concurrent.CountDownLatch;
    +import java.util.concurrent.ExecutionException;
     import java.util.concurrent.TimeUnit;
    +import java.util.function.Consumer;
     
     import static org.apache.lucene.tests.util.LuceneTestCase.expectThrows;
     import static org.apache.lucene.tests.util.LuceneTestCase.expectThrowsAnyOf;
    @@ -87,11 +89,15 @@
     
     public class ElasticsearchAssertions {
     
    -    public static void assertAcked(AcknowledgedRequestBuilder<?, ? extends IsAcknowledgedSupplier, ?> builder) {
    +    public static void assertAcked(ActionRequestBuilder<?, ? extends IsAcknowledgedSupplier> builder) {
             assertAcked(builder, TimeValue.timeValueSeconds(30));
         }
     
    -    public static void assertAcked(AcknowledgedRequestBuilder<?, ? extends IsAcknowledgedSupplier, ?> builder, TimeValue timeValue) {
    +    public static void assertAcked(ActionFuture<? extends IsAcknowledgedSupplier> future) {
    +        assertAcked(future.actionGet());
    +    }
    +
    +    public static void assertAcked(ActionRequestBuilder<?, ? extends IsAcknowledgedSupplier> builder, TimeValue timeValue) {
             assertAcked(builder.get(timeValue));
         }
     
    @@ -223,17 +229,16 @@ public static String formatShardStatus(SearchResponse response) {
             return msg.toString();
         }
     
    +    public static void assertNoSearchHits(SearchRequestBuilder searchRequestBuilder) {
    +        assertResponse(searchRequestBuilder, ElasticsearchAssertions::assertNoSearchHits);
    +    }
    +
         public static void assertNoSearchHits(SearchResponse searchResponse) {
             assertThat(searchResponse.getHits().getHits(), emptyArray());
         }
     
         public static void assertSearchHits(SearchRequestBuilder searchRequestBuilder, String... ids) {
    -        var res = searchRequestBuilder.get();
    -        try {
    -            assertSearchHits(res, ids);
    -        } finally {
    -            res.decRef();
    -        }
    +        assertResponse(searchRequestBuilder, res -> assertSearchHits(res, ids));
         }
     
         public static void assertSearchHits(SearchResponse searchResponse, String... ids) {
    @@ -244,28 +249,27 @@ public static void assertSearchHits(SearchResponse searchResponse, String... ids
             );
         }
     
    +    public static void assertSearchHitsWithoutFailures(SearchRequestBuilder requestBuilder, String... ids) {
    +        assertResponse(requestBuilder, res -> {
    +            assertNoFailures(res);
    +            assertHitCount(res, ids.length);
    +            assertSearchHits(res, ids);
    +        });
    +    }
    +
         public static void assertSortValues(SearchRequestBuilder searchRequestBuilder, Object[]... sortValues) {
    -        var searchResponse = searchRequestBuilder.get();
    -        try {
    -            assertSearchResponse(searchResponse);
    -            SearchHit[] hits = searchResponse.getHits().getHits();
    +        assertNoFailuresAndResponse(searchRequestBuilder, res -> {
    +            SearchHit[] hits = res.getHits().getHits();
                 assertEquals(sortValues.length, hits.length);
                 for (int i = 0; i < sortValues.length; ++i) {
                     final Object[] hitsSortValues = hits[i].getSortValues();
                     assertArrayEquals("Offset " + i + ", id " + hits[i].getId(), sortValues[i], hitsSortValues);
                 }
    -        } finally {
    -            searchResponse.decRef();
    -        }
    +        });
         }
     
         public static void assertOrderedSearchHits(SearchRequestBuilder searchRequestBuilder, String... ids) {
    -        var res = searchRequestBuilder.get();
    -        try {
    -            assertOrderedSearchHits(res, ids);
    -        } finally {
    -            res.decRef();
    -        }
    +        assertResponse(searchRequestBuilder, res -> assertOrderedSearchHits(res, ids));
         }
     
         public static void assertOrderedSearchHits(SearchResponse searchResponse, String... ids) {
    @@ -277,11 +281,14 @@ public static void assertOrderedSearchHits(SearchResponse searchResponse, String
         }
     
         public static void assertHitCount(SearchRequestBuilder searchRequestBuilder, long expectedHitCount) {
    -        var res = searchRequestBuilder.get();
    +        assertResponse(searchRequestBuilder, res -> assertHitCount(res, expectedHitCount));
    +    }
    +
    +    public static void assertHitCount(ActionFuture<SearchResponse> responseFuture, long expectedHitCount) {
             try {
    -            assertHitCount(res, expectedHitCount);
    -        } finally {
    -            res.decRef();
    +            assertResponse(responseFuture, res -> assertHitCount(res, expectedHitCount));
    +        } catch (ExecutionException | InterruptedException ex) {
    +            throw new AssertionError(ex);
             }
         }
     
    @@ -292,6 +299,10 @@ public static void assertHitCount(SearchResponse countResponse, long expectedHit
             }
         }
     
    +    public static void assertHitCountAndNoFailures(SearchRequestBuilder searchRequestBuilder, long expectedHitCount) {
    +        assertNoFailuresAndResponse(searchRequestBuilder, response -> assertHitCount(response, expectedHitCount));
    +    }
    +
         public static void assertExists(GetResponse response) {
             String message = String.format(Locale.ROOT, "Expected %s/%s to exist, but does not", response.getIndex(), response.getId());
             assertThat(message, response.isExists(), is(true));
    @@ -320,9 +331,42 @@ public static void assertSearchHit(SearchResponse searchResponse, int number, Ma
         }
     
         public static void assertNoFailures(SearchRequestBuilder searchRequestBuilder) {
    +        assertNoFailuresAndResponse(searchRequestBuilder, r -> {});
    +    }
    +
    +    public static void assertNoFailuresAndResponse(SearchRequestBuilder searchRequestBuilder, Consumer<SearchResponse> consumer) {
    +        assertResponse(searchRequestBuilder, res -> {
    +            assertNoFailures(res);
    +            consumer.accept(res);
    +        });
    +    }
    +
    +    public static void assertResponse(SearchRequestBuilder searchRequestBuilder, Consumer<SearchResponse> consumer) {
             var res = searchRequestBuilder.get();
             try {
    -            assertNoFailures(res);
    +            consumer.accept(res);
    +        } finally {
    +            res.decRef();
    +        }
    +    }
    +
    +    public static void assertResponse(ActionFuture<SearchResponse> responseFuture, Consumer<SearchResponse> consumer)
    +        throws ExecutionException, InterruptedException {
    +        var res = responseFuture.get();
    +        try {
    +            consumer.accept(res);
    +        } finally {
    +            res.decRef();
    +        }
    +    }
    +
    +    public static void assertCheckedResponse(
    +        SearchRequestBuilder searchRequestBuilder,
    +        CheckedConsumer<SearchResponse, IOException> consumer
    +    ) throws IOException {
    +        var res = searchRequestBuilder.get();
    +        try {
    +            consumer.accept(res);
             } finally {
                 res.decRef();
             }
    @@ -348,12 +392,13 @@ public static void assertFailures(SearchRequestBuilder searchRequestBuilder, Res
             // when the number for shards is randomized and we expect failures
             // we can either run into partial or total failures depending on the current number of shards
             try {
    -            SearchResponse searchResponse = searchRequestBuilder.get();
    -            assertThat("Expected shard failures, got none", searchResponse.getShardFailures(), not(emptyArray()));
    -            for (ShardSearchFailure shardSearchFailure : searchResponse.getShardFailures()) {
    -                assertThat(shardSearchFailure.status(), equalTo(restStatus));
    -                assertThat(shardSearchFailure.reason(), reasonMatcher);
    -            }
    +            assertResponse(searchRequestBuilder, response -> {
    +                assertThat("Expected shard failures, got none", response.getShardFailures(), not(emptyArray()));
    +                for (ShardSearchFailure shardSearchFailure : response.getShardFailures()) {
    +                    assertThat(shardSearchFailure.status(), equalTo(restStatus));
    +                    assertThat(shardSearchFailure.reason(), reasonMatcher);
    +                }
    +            });
             } catch (SearchPhaseExecutionException e) {
                 assertThat(e.status(), equalTo(restStatus));
                 assertThat(e.toString(), reasonMatcher);
    @@ -392,6 +437,28 @@ public static void assertHighlight(SearchResponse resp, int hit, String field, i
             assertHighlight(resp, hit, field, fragment, greaterThan(fragment), matcher);
         }
     
    +    public static void assertHighlight(
    +        SearchRequestBuilder searchRequestBuilder,
    +        int hit,
    +        String field,
    +        int fragment,
    +        int totalFragments,
    +        Matcher<String> matcher
    +    ) {
    +        assertResponse(searchRequestBuilder, response -> assertHighlight(response, hit, field, fragment, equalTo(totalFragments), matcher));
    +    }
    +
    +    public static void assertHighlight(
    +        ActionFuture<SearchResponse> responseFuture,
    +        int hit,
    +        String field,
    +        int fragment,
    +        int totalFragments,
    +        Matcher<String> matcher
    +    ) throws ExecutionException, InterruptedException {
    +        assertResponse(responseFuture, response -> assertHighlight(response, hit, field, fragment, equalTo(totalFragments), matcher));
    +    }
    +
         public static void assertHighlight(
             SearchResponse resp,
             int hit,
    @@ -436,6 +503,10 @@ private static void assertHighlight(
             assertThat(hit.getHighlightFields().get(field).fragments()[fragment].string(), matcher);
         }
     
    +    public static void assertNotHighlighted(SearchRequestBuilder searchRequestBuilder, int hit, String field) {
    +        assertResponse(searchRequestBuilder, response -> assertNotHighlighted(response, hit, field));
    +    }
    +
         public static void assertNotHighlighted(SearchResponse resp, int hit, String field) {
             assertNoFailures(resp);
             assertThat("not enough hits", resp.getHits().getHits(), arrayWithSize(greaterThan(hit)));
    @@ -509,7 +580,6 @@ public static void assertIndexTemplateExists(GetIndexTemplatesResponse templates
             assertThat(templatesResponse.getIndexTemplates(), hasItem(transformedMatch(IndexTemplateMetadata::name, equalTo(name))));
         }
     
    -    /*
         /*
          * matchers
          */
    @@ -639,23 +709,6 @@ public static void assertFutureThrows(ActionFuture<?> future, RestStatus status,
             assertThat(extraInfo, ExceptionsHelper.status(e), equalTo(status));
         }
     
    -    /**
    -     * Applies basic assertions on the SearchResponse. This method checks if all shards were successful, if
    -     * any of the shards threw an exception and if the response is serializable.
    -     */
    -    public static SearchResponse assertSearchResponse(SearchRequestBuilder request) {
    -        return assertSearchResponse(request.get());
    -    }
    -
    -    /**
    -     * Applies basic assertions on the SearchResponse. This method checks if all shards were successful, if
    -     * any of the shards threw an exception and if the response is serializable.
    -     */
    -    public static SearchResponse assertSearchResponse(SearchResponse response) {
    -        assertNoFailures(response);
    -        return response;
    -    }
    -
         /**
          * Check if a file exists
          */
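With assertSearchResponse removed, call sites move to the consumer-based helpers above, which own the response lifecycle and call decRef() even when the assertion throws. A before/after sketch using only APIs from this patch; the index name and hit count are illustrative:

        // Before: every call site managed the response lifecycle by hand.
        SearchResponse res = prepareSearch("test").get();
        try {
            assertNoFailures(res);
            assertHitCount(res, 5);
        } finally {
            res.decRef();
        }

        // After: the helper acquires, asserts, and releases in one call.
        assertHitCountAndNoFailures(prepareSearch("test"), 5);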
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java
    index 576f7468e0819..51736f6e533ca 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/index/IndexVersionUtils.java
    @@ -10,6 +10,7 @@
     
     import org.elasticsearch.core.Nullable;
     import org.elasticsearch.index.IndexVersion;
    +import org.elasticsearch.index.IndexVersions;
     import org.elasticsearch.index.KnownIndexVersions;
     import org.elasticsearch.test.ESTestCase;
     
    @@ -114,11 +115,11 @@ public static IndexVersion getNextVersion(IndexVersion version) {
     
         /** Returns a random {@code IndexVersion} that is compatible with {@link IndexVersion#current()} */
         public static IndexVersion randomCompatibleVersion(Random random) {
    -        return randomVersionBetween(random, IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current());
    +        return randomVersionBetween(random, IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current());
         }
     
         /** Returns a random {@code IndexVersion} that is compatible with the previous version to {@code version} */
         public static IndexVersion randomPreviousCompatibleVersion(Random random, IndexVersion version) {
    -        return randomVersionBetween(random, IndexVersion.MINIMUM_COMPATIBLE, getPreviousVersion(version));
    +        return randomVersionBetween(random, IndexVersions.MINIMUM_COMPATIBLE, getPreviousVersion(version));
         }
     }
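A hedged sketch of the updated helpers in use, assuming an ESTestCase context where random() is available; the only behavioral point is that the bounds now come from IndexVersions:

    IndexVersion picked = IndexVersionUtils.randomCompatibleVersion(random());
    assert picked.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE);
    assert picked.onOrBefore(IndexVersion.current());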
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
    index 80237927655e0..f157ef5ced63f 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -642,14 +642,23 @@ protected Set<String> preserveILMPolicyIds() {
                 "watch-history-ilm-policy-16",
                 "ml-size-based-ilm-policy",
                 "logs",
    +            "logs@lifecycle",
                 "metrics",
    +            "metrics@lifecycle",
                 "profiling",
    +            "profiling@lifecycle",
                 "synthetics",
    +            "synthetics@lifecycle",
                 "7-days-default",
    +            "7-days@lifecycle",
                 "30-days-default",
    +            "30-days@lifecycle",
                 "90-days-default",
    +            "90-days@lifecycle",
                 "180-days-default",
    +            "180-days@lifecycle",
                 "365-days-default",
    +            "365-days@lifecycle",
                 ".fleet-files-ilm-policy",
                 ".fleet-file-data-ilm-policy",
                 ".fleet-actions-results-ilm-policy",
    @@ -1869,6 +1878,10 @@ protected static boolean isXPackTemplate(String name) {
             if (name.startsWith("elastic-connectors")) {
                 return true;
             }
    +        if (name.contains("@")) {
    +            // We have a naming convention that internal component templates contain `@`. See also index-templates.asciidoc.
    +            return true;
    +        }
             switch (name) {
                 case ".watches":
                 case "security_audit_log":
    @@ -1891,7 +1904,6 @@ protected static boolean isXPackTemplate(String name) {
                 case "logstash-index-template":
                 case "security-index-template":
                 case "data-streams-mappings":
    -            case "ecs@dynamic_templates":
                 case "search-acl-filter":
                 case ".kibana-reporting":
                     return true;
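The contains("@") check subsumes per-name switch cases such as the ecs@dynamic_templates entry removed in the hunk above; an illustrative sketch of the resulting behavior, with example names that are assumptions rather than a complete list:

    assert isXPackTemplate("logs@lifecycle");               // matches the `@` convention
    assert isXPackTemplate("ecs@dynamic_templates");        // no longer needs its own case
    assert isXPackTemplate("my-custom-template") == false;  // falls through to the switch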
    diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
    index 3a9b918e654e5..e0cd47c48515b 100644
    --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
    +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
    @@ -39,6 +39,7 @@
     import org.elasticsearch.search.SearchModule;
     import org.elasticsearch.tasks.TaskManager;
     import org.elasticsearch.telemetry.tracing.Tracer;
    +import org.elasticsearch.test.ESIntegTestCase;
     import org.elasticsearch.test.ESTestCase;
     import org.elasticsearch.test.tasks.MockTaskManager;
     import org.elasticsearch.threadpool.ThreadPool;
    @@ -74,6 +75,8 @@
     import java.util.function.Function;
     import java.util.function.Supplier;
     
    +import static org.junit.Assert.assertNotNull;
    +
     /**
      * A mock delegate service that allows to simulate different network topology failures.
      * Internally it maps TransportAddress objects to rules that inject failures.
    @@ -188,6 +191,14 @@ public static MockTransportService createNewService(
             );
         }
     
    +    public static MockTransportService getInstance(String nodeName) {
    +        assertNotNull("nodeName must not be null", nodeName);
    +        return ESTestCase.asInstanceOf(
    +            MockTransportService.class,
    +            ESIntegTestCase.internalCluster().getInstance(TransportService.class, nodeName)
    +        );
    +    }
    +
         private final Transport original;
     
         /**
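A minimal sketch of the new lookup helper inside an internal-cluster test; the node names are illustrative, and addFailToSendNoConnectRule is one of this class's existing fault-injection hooks:

    String nodeA = internalCluster().startNode();
    String nodeB = internalCluster().startNode();
    // One call replaces the usual getInstance(TransportService.class, name) plus cast:
    MockTransportService serviceA = MockTransportService.getInstance(nodeA);
    MockTransportService serviceB = MockTransportService.getInstance(nodeB);
    serviceA.addFailToSendNoConnectRule(serviceB); // simulate a broken connection A -> B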
    diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
    index 3b42181216bcb..f9085ec258627 100644
    --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
    +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
    @@ -48,6 +48,7 @@
     import org.elasticsearch.core.SuppressForbidden;
     import org.elasticsearch.core.TimeValue;
     import org.elasticsearch.index.IndexVersion;
    +import org.elasticsearch.index.IndexVersions;
     import org.elasticsearch.mocksocket.MockServerSocket;
     import org.elasticsearch.node.Node;
     import org.elasticsearch.tasks.Task;
    @@ -122,7 +123,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
         // we use always a non-alpha or beta version here otherwise minimumCompatibilityVersion will be different for the two used versions
         protected static final VersionInformation version0 = new VersionInformation(
             Version.fromString(String.valueOf(Version.CURRENT.major) + ".0.0"),
    -        IndexVersion.MINIMUM_COMPATIBLE,
    +        IndexVersions.MINIMUM_COMPATIBLE,
             IndexVersion.current()
         );
         protected static final TransportVersion transportVersion0 = TransportVersion.current();
    @@ -133,7 +134,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase {
     
         protected static final VersionInformation version1 = new VersionInformation(
             Version.fromId(version0.nodeVersion().id + 1),
    -        IndexVersion.MINIMUM_COMPATIBLE,
    +        IndexVersions.MINIMUM_COMPATIBLE,
             IndexVersion.current()
         );
         protected static final TransportVersion transportVersion1 = TransportVersion.fromId(transportVersion0.id() + 1);
    @@ -2322,7 +2323,7 @@ public void testHandshakeWithIncompatVersion() {
                     "TS_C",
                     new VersionInformation(
                         Version.CURRENT.minimumCompatibilityVersion(),
    -                    IndexVersion.MINIMUM_COMPATIBLE,
    +                    IndexVersions.MINIMUM_COMPATIBLE,
                         IndexVersion.current()
                     ),
                     transportVersion,
    @@ -2361,7 +2362,7 @@ public void testHandshakeUpdatesVersion() throws IOException {
                     "TS_C",
                     new VersionInformation(
                         Version.CURRENT.minimumCompatibilityVersion(),
    -                    IndexVersion.MINIMUM_COMPATIBLE,
    +                    IndexVersions.MINIMUM_COMPATIBLE,
                         IndexVersion.current()
                     ),
                     transportVersion,
    diff --git a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java
    index 46b877a7eb4e3..d12966d4239a6 100644
    --- a/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java
    +++ b/test/framework/src/test/java/org/elasticsearch/test/index/IndexVersionUtilsTests.java
    @@ -9,6 +9,7 @@
     package org.elasticsearch.test.index;
     
     import org.elasticsearch.index.IndexVersion;
    +import org.elasticsearch.index.IndexVersions;
     import org.elasticsearch.test.ESTestCase;
     
     import java.util.ArrayList;
    @@ -19,7 +20,7 @@
     
     public class IndexVersionUtilsTests extends ESTestCase {
         /**
    -     * Tests that {@link IndexVersion#MINIMUM_COMPATIBLE} and {@link IndexVersionUtils#allReleasedVersions()}
    +     * Tests that {@link IndexVersions#MINIMUM_COMPATIBLE} and {@link IndexVersionUtils#allReleasedVersions()}
          * agree with the list of index compatible versions we build in gradle.
          */
         @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98054")
    @@ -29,7 +30,7 @@ public void testGradleVersionsMatchVersionUtils() {
                 .stream()
                 /* Java lists all versions from the 5.x series onwards, but we only want to consider
                  * ones that we're supposed to be compatible with. */
    -            .filter(v -> v.onOrAfter(IndexVersion.MINIMUM_COMPATIBLE))
    +            .filter(v -> v.onOrAfter(IndexVersions.MINIMUM_COMPATIBLE))
                 .toList();
     
        List<IndexVersion> releasedIndexCompatible = released.stream()
    diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
    index 0220c0931bca1..6e9107152c6f7 100644
    --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
    +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
    @@ -10,6 +10,7 @@
     
     import org.apache.logging.log4j.LogManager;
     import org.apache.logging.log4j.Logger;
    +import org.elasticsearch.Build;
     import org.elasticsearch.Version;
     import org.elasticsearch.client.HasAttributeNodeSelector;
     import org.elasticsearch.client.Node;
    @@ -38,6 +39,7 @@
     import java.util.Objects;
     import java.util.Set;
     import java.util.TreeMap;
    +import java.util.function.Predicate;
     import java.util.regex.Pattern;
     
     import static java.util.Collections.emptyList;
    @@ -626,24 +628,45 @@ public String toString() {
             return result;
         }
     
    +    private static boolean matchWithRange(String nodeVersionString, List<VersionRange> acceptedVersionRanges, XContentLocation location) {
    +        try {
    +            Version version = Version.fromString(nodeVersionString);
    +            return acceptedVersionRanges.stream().anyMatch(v -> v.contains(version));
    +        } catch (IllegalArgumentException e) {
    +            throw new XContentParseException(
    +                location,
    +                "[version] range node selector expects a semantic version format (x.y.z), but found " + nodeVersionString,
    +                e
    +            );
    +        }
    +    }
    +
         private static NodeSelector parseVersionSelector(XContentParser parser) throws IOException {
             if (false == parser.currentToken().isValue()) {
                 throw new XContentParseException(parser.getTokenLocation(), "expected [version] to be a value");
             }
    -        List<VersionRange> skipVersionRanges = parser.text().equals("current")
    -            ? List.of(new VersionRange(Version.CURRENT, Version.CURRENT))
    -            : SkipSection.parseVersionRanges(parser.text());
    +
    +        final Predicate<String> nodeMatcher;
    +        final String versionSelectorString;
    +        if (parser.text().equals("current")) {
    +            nodeMatcher = nodeVersion -> Build.current().version().equals(nodeVersion);
    +            versionSelectorString = "version is " + Build.current().version() + " (current)";
    +        } else {
    +            var acceptedVersionRange = SkipSection.parseVersionRanges(parser.text());
    +            nodeMatcher = nodeVersion -> matchWithRange(nodeVersion, acceptedVersionRange, parser.getTokenLocation());
    +            versionSelectorString = "version ranges " + acceptedVersionRange;
    +        }
    +
             return new NodeSelector() {
                 @Override
                 public void select(Iterable<Node> nodes) {
                     for (Iterator<Node> itr = nodes.iterator(); itr.hasNext();) {
                         Node node = itr.next();
    -                    if (node.getVersion() == null) {
    +                    String versionString = node.getVersion();
    +                    if (versionString == null) {
                             throw new IllegalStateException("expected [version] metadata to be set but got " + node);
                         }
    -                    Version version = Version.fromString(node.getVersion());
    -                    boolean skip = skipVersionRanges.stream().anyMatch(v -> v.contains(version));
    -                    if (false == skip) {
    +                    if (nodeMatcher.test(versionString) == false) {
                             itr.remove();
                         }
                     }
@@ -651,7 +674,7 @@ public void select(Iterable<Node> nodes) {
     
                 @Override
                 public String toString() {
    -                return "version ranges " + skipVersionRanges;
    +                return versionSelectorString;
                 }
             };
         }
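Restated as a hedged sketch (not the patch code; `ranges` and `location` stand in for the parsed version ranges and the parser's token location), the selector now splits into two predicates:

    // "current" compares the node's reported version with the build version verbatim,
    // so non-semantic build version strings can still match it:
    Predicate<String> current = nodeVersion -> Build.current().version().equals(nodeVersion);
    // Explicit ranges still require a semantic x.y.z node version and fail loudly otherwise:
    Predicate<String> ranged = nodeVersion -> matchWithRange(nodeVersion, ranges, location);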
    diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java
    index 81255fe10d933..64832d47cc7b3 100644
    --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java
    +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java
    @@ -37,7 +37,8 @@ public interface ExecutableSection {
             new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("contains"), ContainsAssertion::parse),
             new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("length"), LengthAssertion::parse),
             new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("close_to"), CloseToAssertion::parse),
    -        new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("exists"), ExistsAssertion::parse)
    +        new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("exists"), ExistsAssertion::parse),
    +        new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("not_exists"), NotExistsAssertion::parse)
         );
     
         /**
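With this entry registered, a `not_exists` section parses into the assertion introduced in the new file below. A hedged sketch of its contract (the field path and location are illustrative):

    // Parsed from YAML such as:   - not_exists: get.fields.bar
    NotExistsAssertion assertion = new NotExistsAssertion(new XContentLocation(1, 1), "get.fields.bar");
    assertion.doAssert(null, assertion.getExpectedValue());       // passes: field is absent
    assertion.doAssert("anything", assertion.getExpectedValue()); // throws AssertionError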
    diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/NotExistsAssertion.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/NotExistsAssertion.java
    new file mode 100644
    index 0000000000000..f012bee73763b
    --- /dev/null
    +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/NotExistsAssertion.java
    @@ -0,0 +1,48 @@
    +/*
    + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
    + * or more contributor license agreements. Licensed under the Elastic License
    + * 2.0 and the Server Side Public License, v 1; you may not use this file except
    + * in compliance with, at your election, the Elastic License 2.0 or the Server
    + * Side Public License, v 1.
    + */
    +
    +package org.elasticsearch.test.rest.yaml.section;
    +
    +import org.apache.logging.log4j.LogManager;
    +import org.apache.logging.log4j.Logger;
    +import org.elasticsearch.xcontent.XContentLocation;
    +import org.elasticsearch.xcontent.XContentParser;
    +
    +import java.io.IOException;
    +
    +import static org.hamcrest.Matchers.nullValue;
    +import static org.junit.Assert.assertThat;
    +
    +/**
    + * Represents a not_exists assert section:
+ *
+ *   - not_exists: get.fields.bar
+ */
+public class NotExistsAssertion extends Assertion {
+
+    private static final Logger logger = LogManager.getLogger(NotExistsAssertion.class);
+
+    public static NotExistsAssertion parse(XContentParser parser) throws IOException {
+        return new NotExistsAssertion(parser.getTokenLocation(), ParserUtils.parseField(parser));
+    }
+
+    public NotExistsAssertion(XContentLocation location, String field) {
+        super(location, field, false /* not used */);
+    }
+
+    @Override
+    protected void doAssert(Object actualValue, Object expectedValue) {
+        logger.trace("assert that field [{}] does not exist with any value", getField());
+        String errorMessage = errorMessage();
+        assertThat(errorMessage, actualValue, nullValue());
+    }
+
+    private String errorMessage() {
+        return "field [" + getField() + "] exists, but should not.";
+    }
+}
diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java
index 1ac0bff285b96..953b5f261485d 100644
--- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java
+++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java
@@ -185,4 +185,24 @@ public void testExists() throws IOException {
         AssertionError e = expectThrows(AssertionError.class, () -> existsAssertion.doAssert(null, existsAssertion.getExpectedValue()));
         assertThat(e.getMessage(), containsString("field [get.fields._timestamp] does not exist"));
     }
+
+    public void testDoesNotExist() throws IOException {
+        parser = createParser(YamlXContent.yamlXContent, "get.fields._timestamp");
+
+        NotExistsAssertion notExistsAssertion = NotExistsAssertion.parse(parser);
+
+        assertThat(notExistsAssertion, notNullValue());
+        assertThat(notExistsAssertion.getField(), equalTo("get.fields._timestamp"));
+
+        notExistsAssertion.doAssert(null, notExistsAssertion.getExpectedValue());
+
+        AssertionError e = expectThrows(
+            AssertionError.class,
+            () -> notExistsAssertion.doAssert(
+                randomFrom(1, "", "non-empty", List.of(), Map.of(), 0, false),
+                notExistsAssertion.getExpectedValue()
+            )
+        );
+        assertThat(e.getMessage(), containsString("field [get.fields._timestamp] exists, but should not"));
+    }
 }
diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java
index 88c5fdfdb1e78..501f83bb02e1f 100644
--- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java
+++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java
@@ -9,6 +9,7 @@
 package org.elasticsearch.test.rest.yaml.section;
 
 import org.apache.http.HttpHost;
+import org.elasticsearch.Build;
 import org.elasticsearch.Version;
 import org.elasticsearch.client.Node;
 import org.elasticsearch.client.NodeSelector;
@@ -19,6 +20,7 @@
 import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext;
 import org.elasticsearch.test.rest.yaml.ClientYamlTestResponse;
 import org.elasticsearch.xcontent.XContentLocation;
+import org.elasticsearch.xcontent.XContentParseException;
 import org.elasticsearch.xcontent.XContentParser;
 import org.elasticsearch.xcontent.yaml.YamlXContent;
 import org.hamcrest.MatcherAssert;
@@ -36,6 +38,7 @@
 import static java.util.Collections.singletonList;
 import static java.util.Collections.singletonMap;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.Matchers.endsWith;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
@@ -576,7 +579,7 @@ public void testParseDoSectionAllowedWarnings() throws Exception {
         assertThat(e.getMessage(), equalTo("the warning [foo] was both allowed and expected"));
     }
 
-    public void testNodeSelectorByVersion() throws IOException {
+    public void testNodeSelectorByVersionRange() throws IOException {
         parser = createParser(YamlXContent.yamlXContent, """
             node_selector:
                 version: 5.2.0-6.0.0
@@ -626,6 +629,28 @@ public void testNodeSelectorByVersion() throws IOException {
         }
     }
 
+    public void testNodeSelectorByVersionRangeFailsWithNonSemanticVersion() throws IOException {
+        parser = createParser(YamlXContent.yamlXContent, """
+            node_selector:
+                version: 5.2.0-6.0.0
+            indices.get_field_mapping:
+                index: test_index""");
+
+        DoSection doSection = DoSection.parse(parser);
+        assertNotSame(NodeSelector.ANY, doSection.getApiCallSection().getNodeSelector());
+        Node nonSemantic = nodeWithVersion("abddef");
+        List<Node> nodes = new ArrayList<>();
+
+        var exception = expectThrows(
+            XContentParseException.class,
+            () -> doSection.getApiCallSection().getNodeSelector().select(List.of(nonSemantic))
+        );
+        assertThat(
+            exception.getMessage(),
+            endsWith("[version] range node selector expects a semantic version format (x.y.z), but found abddef")
+        );
+    }
+
     public void testNodeSelectorCurrentVersion() throws IOException {
         parser = createParser(YamlXContent.yamlXContent, """
             node_selector:
@@ -638,14 +663,16 @@ public void testNodeSelectorCurrentVersion() throws IOException {
         Node v170 = nodeWithVersion("1.7.0");
         Node v521 = nodeWithVersion("5.2.1");
         Node v550 = nodeWithVersion("5.5.0");
-        Node current = nodeWithVersion(Version.CURRENT.toString());
+        Node oldCurrent = nodeWithVersion(Version.CURRENT.toString());
+        Node newCurrent = nodeWithVersion(Build.current().version());
         List<Node> nodes = new ArrayList<>();
         nodes.add(v170);
         nodes.add(v521);
         nodes.add(v550);
-        nodes.add(current);
+        nodes.add(oldCurrent);
+        nodes.add(newCurrent);
         doSection.getApiCallSection().getNodeSelector().select(nodes);
-        assertEquals(List.of(current), nodes);
+        assertEquals(List.of(oldCurrent, newCurrent), nodes);
     }
 
     private static Node nodeWithVersion(String version) {
diff --git a/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java
index 714892558d423..f8b276dbbf6a5 100644
--- a/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java
+++ b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java
@@ -55,16 +55,14 @@ public void testRequestBreaker() throws Exception {
         );
 
         try {
-            client().prepareSearch("test")
-                .addAggregation(
-                    new MultiTermsAggregationBuilder("xxx").terms(
-                        List.of(
-                            new MultiValuesSourceFieldConfig.Builder().setFieldName("field0.keyword").build(),
-                            new MultiValuesSourceFieldConfig.Builder().setFieldName("field1.keyword").build()
-                        )
+            prepareSearch("test").addAggregation(
+                new MultiTermsAggregationBuilder("xxx").terms(
+                    List.of(
+                        new MultiValuesSourceFieldConfig.Builder().setFieldName("field0.keyword").build(),
+                        new MultiValuesSourceFieldConfig.Builder().setFieldName("field1.keyword").build()
                     )
                 )
-                .get();
+            ).get();
         } catch (ElasticsearchException e) {
             if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) {
                 throw e;
diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java
index 9360f97990c82..28d9d87f147d3 100644
--- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java
+++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/AnalyticsPlugin.java
@@ -8,27 +8,14 @@
 
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.client.internal.Client;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Setting;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.MapperPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SearchPlugin;
-import org.elasticsearch.repositories.RepositoriesService;
-import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
-import org.elasticsearch.telemetry.TelemetryProvider;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.analytics.action.AnalyticsInfoTransportAction;
 import org.elasticsearch.xpack.analytics.action.AnalyticsUsageTransportAction;
 import org.elasticsearch.xpack.analytics.action.TransportAnalyticsStatsAction;
@@ -64,7 +51,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.function.Consumer;
-import java.util.function.Supplier;
 
 public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugin, MapperPlugin {
     private final AnalyticsUsage usage = new AnalyticsUsage();
@@ -175,22 +161,7 @@ public List<Consumer<ValuesSourceRegistry.Builder>> getAggregationExtentions() {
     }
 
     @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
+    public Collection<Object> createComponents(PluginServices services) {
         return List.of(usage);
     }
 
diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java
index 9ff3024cfa59e..f66008bcc932c 100644
--- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java
+++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/aggregations/support/AnalyticsValuesSourceType.java
@@ -11,13 +11,13 @@
 import org.elasticsearch.script.AggregationScript;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationErrors;
-import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.FieldContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.util.Locale;
+import java.util.function.LongSupplier;
 
 public enum AnalyticsValuesSourceType implements ValuesSourceType {
     HISTOGRAM() {
@@ -49,7 +49,7 @@ public ValuesSource replaceMissing(
             ValuesSource valuesSource,
             Object rawMissing,
             DocValueFormat docValueFormat,
-            AggregationContext context
+            LongSupplier nowInMillis
         ) {
             throw new IllegalArgumentException("Can't apply missing values on a " + valuesSource.getClass());
         }
diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java
index 37332fa49bfbf..634c76b819ea0 100644
--- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java
+++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java
@@ -194,7 +194,17 @@ static Rounding.DateTimeUnit parse(String rateUnit) {
     @Override
     protected ValuesSourceConfig resolveConfig(AggregationContext context) {
         if (field() == null && script() == null) {
-            return new ValuesSourceConfig(CoreValuesSourceType.NUMERIC, null, true, null, null, 1.0, null, DocValueFormat.RAW, context);
+            return new ValuesSourceConfig(
+                CoreValuesSourceType.NUMERIC,
+                null,
+                true,
+                null,
+                null,
+                1.0,
+                null,
+                DocValueFormat.RAW,
+                context::nowInMillis
+            );
         } else {
             return super.resolveConfig(context);
         }
diff --git a/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java b/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java
index a0e6d0daac322..3dbe59dc825ff 100644
--- a/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java
+++ b/x-pack/plugin/async/src/main/java/org/elasticsearch/xpack/async/AsyncResultsIndexPlugin.java
@@ -7,33 +7,18 @@
 
 package org.elasticsearch.xpack.async;
 
-import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.client.internal.OriginSettingClient;
-import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
-import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.SystemIndexPlugin;
-import org.elasticsearch.repositories.RepositoriesService;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.telemetry.TelemetryProvider;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.core.async.AsyncTaskIndexService;
 import org.elasticsearch.xpack.core.async.AsyncTaskMaintenanceService;
 
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import java.util.function.Supplier;
 
 import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN;
 
@@ -61,31 +46,16 @@ public String getFeatureDescription() {
     }
 
     @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
+    public Collection<Object> createComponents(PluginServices services) {
         List<Object> components = new ArrayList<>();
-        if (DiscoveryNode.canContainData(environment.settings())) {
+        if (DiscoveryNode.canContainData(services.environment().settings())) {
             // only data nodes should be eligible to run the maintenance service.
             AsyncTaskMaintenanceService maintenanceService = new AsyncTaskMaintenanceService(
-                clusterService,
-                nodeEnvironment.nodeId(),
+                services.clusterService(),
+                services.nodeEnvironment().nodeId(),
                 settings,
-                threadPool,
-                new OriginSettingClient(client, ASYNC_SEARCH_ORIGIN)
+                services.threadPool(),
+                new OriginSettingClient(services.client(), ASYNC_SEARCH_ORIGIN)
             );
             components.add(maintenanceService);
         }
diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java
index 0bd1731c48f0a..51d0c2c0aef80 100644
--- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java
+++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java
@@ -351,7 +351,6 @@ public void testScaleWhileShrinking() throws Exception {
                     .build()
             )
                 .setWaitForActiveShards(ActiveShardCount.NONE)
-                .get()
         );
 
         // * 2 since worst case is no hard links, see DiskThresholdDecider.getExpectedShardSize.
@@ -468,7 +467,6 @@ public void testScaleDuringSplitOrClone() throws Exception {
             )
                 .setWaitForActiveShards(ActiveShardCount.NONE)
                 .setResizeType(resizeType)
-                .get()
         );
 
         // * 2 since worst case is no hard links, see DiskThresholdDecider.getExpectedShardSize.
diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
index c832a3c7eb461..e9d54826436c2 100644
--- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
+++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/Autoscaling.java
@@ -10,7 +10,6 @@
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.NamedDiff;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.Metadata;
@@ -23,22 +22,14 @@
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsFilter;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.license.License;
 import org.elasticsearch.license.LicensedFeature;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.ExtensiblePlugin;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.repositories.RepositoriesService;
 import org.elasticsearch.reservedstate.ReservedClusterStateHandler;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.telemetry.TelemetryProvider;
-import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.autoscaling.action.DeleteAutoscalingPolicyAction;
@@ -109,27 +100,16 @@ public Autoscaling() {
     }
 
     @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver indexNameExpressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
-        this.clusterServiceHolder.set(clusterService);
-        this.allocationServiceHolder.set(allocationService);
+    public Collection<Object> createComponents(PluginServices services) {
+        this.clusterServiceHolder.set(services.clusterService());
+        this.allocationServiceHolder.set(services.allocationService());
         var capacityServiceHolder = new AutoscalingCalculateCapacityService.Holder(this);
         this.reservedAutoscalingPolicyAction.set(new ReservedAutoscalingPolicyAction(capacityServiceHolder));
-        return List.of(capacityServiceHolder, autoscalingLicenseChecker, new AutoscalingNodeInfoService(clusterService, client));
+        return List.of(
+            capacityServiceHolder,
+            autoscalingLicenseChecker,
+            new AutoscalingNodeInfoService(services.clusterService(), services.client())
+        );
     }
 
     @Override
diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
index 3eaf6f267a545..da39d221f92f1 100644
--- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
+++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
@@ -1,5 +1,6 @@
 import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -23,8 +24,7 @@ def followCluster = testClusters.register("follow-cluster") {
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
-  setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\""
-  }
+  setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register("leader-cluster", RestIntegTestTask) {
@@ -51,8 +51,8 @@ tasks.register("writeJavaPolicy") {
 tasks.register("follow-cluster", RestIntegTestTask) {
   dependsOn 'writeJavaPolicy', "leader-cluster"
   useCluster leaderCluster
-  systemProperty 'java.security.policy', "file://${policyFile}"
   systemProperty 'tests.target_cluster', 'follow'
+  nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}"
   nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0))
   nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog())
 }
diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
index 31cdf04f6dc94..2475a56aa87aa 100644
--- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
@@ -1,6 +1,7 @@
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -25,7 +26,7 @@ def middleCluster = testClusters.register('middle-cluster') {
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }
+    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register("leader-cluster", RestIntegTestTask) {
@@ -60,9 +61,9 @@ testClusters.matching {it.name == "follow-cluster" }.configureEach {
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }
+    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
   setting 'cluster.remote.middle_cluster.seeds',
-    { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }
+    { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
 }
diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
index 67465bc782ad9..7661ea08b057d 100644
--- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
+++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle
@@ -1,4 +1,5 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -21,7 +22,7 @@ def followerCluster = testClusters.register('follow-cluster') {
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }
+    { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE
 }
 
 tasks.register('leader-cluster', RestIntegTestTask) {
@@ -36,4 +37,4 @@ tasks.register('follow-cluster', RestIntegTestTask) {
   nonInputProperties.systemProperty 'tests.leader_host', followerCluster.map(c -> c.allHttpSocketURI.get(0))
 }
 
-tasks.named("check").configure { dependsOn "follow-cluster" }
\ No newline at end of file
+tasks.named("check").configure { dependsOn "follow-cluster" }
diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle
index d354cd911f2f8..47d37801e2dcf 100644
--- a/x-pack/plugin/ccr/qa/restart/build.gradle
+++ b/x-pack/plugin/ccr/qa/restart/build.gradle
@@ -1,5 +1,6 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -22,7 +23,7 @@ def followCluster = testClusters.register('follow-cluster') {
   setting 'xpack.security.enabled', 'true'
   user username: 'admin', password: 'admin-password', role: 'superuser'
   setting 'cluster.remote.leader_cluster.seeds',
-    { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }
+    { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE
   nameCustomization = { 'follow' }
 }
diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle
index ff4a4f857fe32..5515aefeaa091 100644
--- a/x-pack/plugin/ccr/qa/security/build.gradle
+++ b/x-pack/plugin/ccr/qa/security/build.gradle
@@ -1,4 +1,5 @@
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
+import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
 
 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
@@ -22,7 +23,7 @@ testClusters.register('follow-cluster') {
   testDistribution = 'DEFAULT'
   setting 'cluster.remote.leader_cluster.seeds', { "\"${leadCluster.get().getAllTransportPortURI().join(",")}\""
-  }
+  }, IGNORE_VALUE
   setting 'xpack.license.self_generated.type', 'trial'
   setting 'xpack.security.enabled', 'true'
   setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests
diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java
index 5896ecacb9ca8..5031a52630033 100644
--- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java
+++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/AutoFollowIT.java
@@ -694,7 +694,6 @@ public void testAutoFollowDatastreamWithClosingFollowerIndex() throws Exception
                 .indices()
                 .prepareCreate(indexInDatastream)
                 .setMapping(MetadataIndexTemplateService.DEFAULT_TIMESTAMP_MAPPING_WITHOUT_ROUTING.toString())
-                .get()
         );
         leaderClient().prepareIndex(indexInDatastream)
             .setCreate(true)
diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java
index 26eb241c0293e..501a664d64698 100644
--- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java
+++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/FollowerFailOverIT.java
@@ -254,7 +254,6 @@ public void testReadRequestsReturnLatestMappingVersion() throws Exception {
                     .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
                     .put("index.routing.allocation.require.box", "large")
             )
-            .get()
         );
         getFollowerCluster().startNode(
             onlyRoles(nodeAttributes, Set.of(DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.REMOTE_CLUSTER_CLIENT_ROLE))
diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
index dae2a102948dd..f21bfc07deba2 100644
--- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
+++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/IndexFollowingIT.java
@@ -109,7 +109,6 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Semaphore;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
@@ -609,7 +608,6 @@ public void testFollowNonExistentIndex() throws Exception {
                 .prepareCreate("test-follower")
                 .setSource(indexSettings, XContentType.JSON)
                 .setMasterNodeTimeout(TimeValue.MAX_VALUE)
-                .get()
         );
         ensureLeaderGreen("test-leader");
         ensureFollowerGreen("test-follower");
@@ -1754,14 +1752,14 @@ private String getIndexSettingsWithNestedMapping(
         return settings;
     }
 
-    private void putFollowerTemplate(String setting, String settingValue) throws InterruptedException, ExecutionException {
+    private void putFollowerTemplate(String setting, String settingValue) {
         Template template = new Template(Settings.builder().put(setting, settingValue).build(), null, null);
         ComposableIndexTemplate cit = new ComposableIndexTemplate(List.of("follower"), template, null, null, null, null);
         assertAcked(
             followerClient().execute(
                 PutComposableIndexTemplateAction.INSTANCE,
                 new PutComposableIndexTemplateAction.Request("my-it").indexTemplate(cit)
-            ).get()
+            )
         );
     }
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
index a139b9a55f1be..7234b7babffdc 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/Ccr.java
@@ -18,7 +18,6 @@
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -30,11 +29,9 @@
 import org.elasticsearch.common.settings.SettingsModule;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.IndexModule;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.EngineFactory;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.persistent.PersistentTaskParams;
 import org.elasticsearch.persistent.PersistentTasksExecutor;
@@ -44,17 +41,13 @@
 import org.elasticsearch.plugins.PersistentTaskPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.RepositoryPlugin;
-import org.elasticsearch.repositories.RepositoriesService;
 import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.tasks.Task;
-import org.elasticsearch.telemetry.TelemetryProvider;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.FixedExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xpack.ccr.action.AutoFollowCoordinator;
@@ -177,44 +170,29 @@ public Ccr(final Settings settings) {
 
     @Override
     @SuppressWarnings("HiddenField")
-    public Collection<Object> createComponents(
-        final Client client,
-        final ClusterService clusterService,
-        final ThreadPool threadPool,
-        final ResourceWatcherService resourceWatcherService,
-        final ScriptService scriptService,
-        final NamedXContentRegistry xContentRegistry,
-        final Environment environment,
-        final NodeEnvironment nodeEnvironment,
-        final NamedWriteableRegistry namedWriteableRegistry,
-        final IndexNameExpressionResolver expressionResolver,
-        final Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
-        this.client = client;
+    public Collection<Object> createComponents(PluginServices services) {
+        this.client = services.client();
         if (enabled == false) {
             return emptyList();
         }
 
-        CcrSettings ccrSettings = new CcrSettings(settings, clusterService.getClusterSettings());
+        CcrSettings ccrSettings = new CcrSettings(settings, services.clusterService().getClusterSettings());
         this.ccrSettings.set(ccrSettings);
-        CcrRestoreSourceService restoreSourceService = new CcrRestoreSourceService(threadPool, ccrSettings);
+        CcrRestoreSourceService restoreSourceService = new CcrRestoreSourceService(services.threadPool(), ccrSettings);
         this.restoreSourceService.set(restoreSourceService);
-        return Arrays.asList(
+        return List.of(
             ccrLicenseChecker,
             restoreSourceService,
-            new CcrRepositoryManager(settings, clusterService, client),
-            new ShardFollowTaskCleaner(clusterService, threadPool, client),
+            new CcrRepositoryManager(settings, services.clusterService(), client),
+            new ShardFollowTaskCleaner(services.clusterService(), services.threadPool(), client),
             new AutoFollowCoordinator(
                 settings,
                 client,
-                clusterService,
+                services.clusterService(),
                 ccrLicenseChecker,
-                threadPool::relativeTimeInMillis,
-                threadPool::absoluteTimeInMillis,
-                threadPool.executor(Ccr.CCR_THREAD_POOL_NAME)
+                services.threadPool()::relativeTimeInMillis,
+                services.threadPool()::absoluteTimeInMillis,
+                services.threadPool().executor(Ccr.CCR_THREAD_POOL_NAME)
             )
         );
    }
diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java
index 42a9710e92023..35b02b26eca9c 100644
--- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java
+++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java
@@ -257,9 +257,9 @@ public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, Sna
     }
 
     @Override
-    public void getRepositoryData(ActionListener<RepositoryData> listener) {
+    public void getRepositoryData(Executor responseExecutor, ActionListener<RepositoryData> listener) {
         try {
-            csDeduplicator.execute(listener.map(response -> {
+            csDeduplicator.execute(new ThreadedActionListener<>(responseExecutor, listener.map(response -> {
                 final Metadata remoteMetadata = response.getMetadata();
                 final String[] concreteAllIndices = remoteMetadata.getConcreteAllIndices();
                 final Map<String, SnapshotId> copiedSnapshotIds = Maps.newMapWithExpectedSize(concreteAllIndices.length);
@@ -287,7 +287,7 @@ public void getRepositoryData(ActionListener<RepositoryData> listener) {
                 IndexMetaDataGenerations.EMPTY,
                 MISSING_UUID
             );
-        }));
+        })));
         } catch (Exception e) {
             assert false;
             listener.onFailure(e);
diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
index e004f9b8f81e8..d1dad46c5515c 100644
--- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
+++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinatorTests.java
@@ -35,6 +35,7 @@
 import org.elasticsearch.index.IndexMode;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.index.IndexVersionUtils;
@@ -2494,7 +2495,7 @@ private static ClusterState createRemoteClusterState(
     ) {
         Settings.Builder indexSettings;
         if (enableSoftDeletes == false) {
-            indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0)).put(
+            indexSettings = settings(IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0)).put(
                 IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(),
                 false
             );
diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle
index 6f9c6b93ac373..f023837247345 100644
--- a/x-pack/plugin/core/build.gradle
+++ b/x-pack/plugin/core/build.gradle
@@ -58,6 +58,7 @@ dependencies {
   testImplementation project(path: ':modules:lang-mustache')
   testImplementation project(path: ':modules:analysis-common')
   testImplementation project(path: ':modules:rest-root')
+  testImplementation project(path: ':modules:health-shards-availability')
  testImplementation project(":client:rest-high-level")
   // Needed for Fips140ProviderVerificationTests
   testCompileOnly('org.bouncycastle:bc-fips:1.0.2')
diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java
index d17637a3fd5a9..02f61498fa93a 100644
--- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java
+++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java
@@ -116,15 +116,12 @@ public void testSnapshotAndRestore() throws Exception {
         assertHits(sourceIdx, builders.length, sourceHadDeletions);
         assertMappings(sourceIdx, requireRouting, useNested);
         SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> {
-            client().prepareSearch(sourceIdx).setQuery(QueryBuilders.idsQuery().addIds("" + randomIntBetween(0, builders.length))).get();
+            prepareSearch(sourceIdx).setQuery(QueryBuilders.idsQuery().addIds("" + randomIntBetween(0, builders.length))).get();
         });
         assertTrue(e.toString().contains("_source only indices can't be searched or filtered"));
         // can-match phase pre-filters access to non-existing field
-        assertEquals(
-            0,
-            client().prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value
-        );
+        assertEquals(0, prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value);
         // make sure deletes do not work
         String idToDelete = "" + randomIntBetween(0, builders.length);
         expectThrows(ClusterBlockException.class, () -> client().prepareDelete(sourceIdx, idToDelete).setRouting("r" + idToDelete).get());
@@ -144,16 +141,11 @@ public void testSnapshotAndRestoreWithNested() throws Exception {
         assertMappings(sourceIdx, requireRouting, true);
         SearchPhaseExecutionException e = expectThrows(
             SearchPhaseExecutionException.class,
-            () -> client().prepareSearch(sourceIdx)
-                .setQuery(QueryBuilders.idsQuery().addIds("" + randomIntBetween(0, builders.length)))
-                .get()
+            () -> prepareSearch(sourceIdx).setQuery(QueryBuilders.idsQuery().addIds("" + randomIntBetween(0, builders.length))).get()
         );
         assertTrue(e.toString().contains("_source only indices can't be searched or filtered"));
         // can-match phase pre-filters access to non-existing field
-        assertEquals(
-            0,
-            client().prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value
-        );
+        assertEquals(0, prepareSearch(sourceIdx).setQuery(QueryBuilders.termQuery("field1", "bar")).get().getHits().getTotalHits().value);
         // make sure deletes do not work
         String idToDelete = "" + randomIntBetween(0, builders.length);
         expectThrows(ClusterBlockException.class, () -> client().prepareDelete(sourceIdx, idToDelete).setRouting("r" + idToDelete).get());
@@ -259,10 +251,7 @@ private static void assertMappings(String sourceIdx, boolean requireRouting, boo
     }
 
     private void assertHits(String index, int numDocsExpected, boolean sourceHadDeletions) {
-        SearchResponse searchResponse = client().prepareSearch(index)
-            .addSort(SeqNoFieldMapper.NAME, SortOrder.ASC)
-            .setSize(numDocsExpected)
-            .get();
+        SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected).get();
         BiConsumer<SearchResponse, Boolean> assertConsumer = (res, allowHoles) -> {
             SearchHits hits = res.getHits();
             long i = 0;
@@ -283,8 +272,7 @@ private void assertHits(String index, int numDocsExpected, boolean sourceHadDele
         };
         assertConsumer.accept(searchResponse, sourceHadDeletions);
         assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value);
-        searchResponse = client().prepareSearch(index)
-            .addSort(SeqNoFieldMapper.NAME, SortOrder.ASC)
+        searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC)
             .setScroll("1m")
             .slice(new SliceBuilder(SeqNoFieldMapper.NAME, randomIntBetween(0, 1), 2))
             .setSize(randomIntBetween(1, 10))
@@ -348,7 +336,7 @@ private IndexRequestBuilder[] snapshotAndRestore(final String sourceIdx, final b
         }
         indexRandom(true, builders);
         flushAndRefresh();
-        assertHitCount(client().prepareSearch(sourceIdx).setQuery(QueryBuilders.idsQuery().addIds("0")), 1);
+        assertHitCount(prepareSearch(sourceIdx).setQuery(QueryBuilders.idsQuery().addIds("0")), 1);
 
         createSnapshot(repo, snapshot, Collections.singletonList(sourceIdx));
diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierShardAvailabilityHealthIndicatorIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierShardAvailabilityHealthIndicatorIT.java
index 00bcd66120352..7737a5b42dfae 100644
--- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierShardAvailabilityHealthIndicatorIT.java
+++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/xpack/cluster/routing/allocation/DataTierShardAvailabilityHealthIndicatorIT.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.health.GetHealthAction;
 import org.elasticsearch.health.HealthIndicatorResult;
 import org.elasticsearch.health.HealthStatus;
+import org.elasticsearch.health.plugin.ShardsAvailabilityPlugin;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
@@ -49,7 +50,7 @@ public class DataTierShardAvailabilityHealthIndicatorIT extends ESIntegTestCase
 
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return List.of(LocalStateCompositeXPackPlugin.class);
+        return List.of(LocalStateCompositeXPackPlugin.class, ShardsAvailabilityPlugin.class);
     }
 
     /**
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
index 1c8881637d4c6..d02e3f43d80cb 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java
@@ -13,17 +13,14 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.support.TransportAction;
-import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
 import org.elasticsearch.cluster.routing.allocation.DataTier;
 import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.logging.DeprecationCategory;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.settings.ClusterSettings;
@@ -35,12 +32,10 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.core.Booleans;
 import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
 import org.elasticsearch.index.IndexSettingProvider;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.engine.EngineFactory;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.indices.recovery.RecoverySettings;
 import org.elasticsearch.license.ClusterStateLicenseService;
 import org.elasticsearch.license.DeleteLicenseAction;
@@ -83,14 +78,10 @@
 import org.elasticsearch.protocol.xpack.XPackInfoRequest;
 import org.elasticsearch.protocol.xpack.XPackInfoResponse;
 import org.elasticsearch.protocol.xpack.XPackUsageRequest;
-import org.elasticsearch.repositories.RepositoriesService;
 import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.snapshots.sourceonly.SourceOnlySnapshotRepository;
-import org.elasticsearch.telemetry.TelemetryProvider;
-import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.watcher.ResourceWatcherService;
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider;
@@ -321,33 +312,24 @@ public Settings additionalSettings() {
     }
 
     @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver expressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
+    public Collection<Object> createComponents(PluginServices services) {
         List<Object> components = new ArrayList<>();
 
-        final SSLService sslService = createSSLService(environment, resourceWatcherService);
+        final SSLService sslService = createSSLService(services.environment(), services.resourceWatcherService());
         LicenseService licenseService = getLicenseService();
         if (licenseService == null) {
-            licenseService = new ClusterStateLicenseService(settings, threadPool, clusterService, getClock(), getLicenseState());
+            licenseService = new ClusterStateLicenseService(
+                settings,
+                services.threadPool(),
+                services.clusterService(),
+                getClock(),
+                getLicenseState()
+            );
             setLicenseService(licenseService);
         }
 
-        setEpochMillisSupplier(threadPool::absoluteTimeInMillis);
+        setEpochMillisSupplier(services.threadPool()::absoluteTimeInMillis);
 
         // It is useful to override these as they are what guice is injecting into actions
         components.add(sslService);
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java
index 887d0d5ec2101..f3d47cea1f39f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java
@@ -205,18 +205,16 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener,
         boolean rolloverOnlyIfHasDocuments = LifecycleSettings.LIFECYCLE_ROLLOVER_ONLY_IF_HAS_DOCUMENTS_SETTING.get(metadata.settings());
         RolloverRequest rolloverRequest =
createRolloverRequest(rolloverTarget, masterTimeout, rolloverOnlyIfHasDocuments); - getClient().admin() - .indices() - .rolloverIndex( - rolloverRequest, - ActionListener.wrap( - response -> listener.onResponse( - rolloverRequest.getConditions().areConditionsMet(response.getConditionStatus()), - EmptyInfo.INSTANCE - ), - listener::onFailure - ) - ); + getClient().admin().indices().rolloverIndex(rolloverRequest, ActionListener.wrap(response -> { + final var conditionStatus = response.getConditionStatus(); + final var conditionsMet = rolloverRequest.getConditions().areConditionsMet(conditionStatus); + if (conditionsMet) { + logger.info("index [{}] is ready for rollover, conditions: [{}]", index.getName(), conditionStatus); + } else { + logger.debug("index [{}] is not ready for rollover, conditions: [{}]", index.getName(), conditionStatus); + } + listener.onResponse(conditionsMet, EmptyInfo.INSTANCE); + }, listener::onFailure)); } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java index e631e3efe5cb6..7ab8e41cd2453 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlConfigVersion.java @@ -268,17 +268,6 @@ public static MlConfigVersion max(MlConfigVersion version1, MlConfigVersion vers return version1.id > version2.id ? version1 : version2; } - // Visible only for testing - static MlConfigVersion fromVersion(Version version) { - if (version.equals(Version.V_8_10_0)) { - return V_10; - } - if (version.after(Version.V_8_10_0)) { - throw new IllegalArgumentException("Cannot convert " + version + ". Incompatible version"); - } - return fromId(version.id); - } - public static MlConfigVersion getMinMlConfigVersion(DiscoveryNodes nodes) { return getMinMaxMlConfigVersion(nodes).v1(); } @@ -308,10 +297,10 @@ public static Tuple getMinMaxMlConfigVersion(D public static MlConfigVersion getMlConfigVersionForNode(DiscoveryNode node) { String mlConfigVerStr = node.getAttributes().get(ML_CONFIG_VERSION_NODE_ATTR); - if (mlConfigVerStr == null) { - return fromVersion(node.getVersion()); + if (mlConfigVerStr != null) { + return fromString(mlConfigVerStr); } - return fromString(mlConfigVerStr); + return fromId(node.getPre811VersionId().orElseThrow(() -> new IllegalStateException("getting legacy version id not possible"))); } // Parse an MlConfigVersion from a string. 
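The getMlConfigVersionForNode change above (the Transform variant below gets the same treatment) replaces the removed fromVersion bridge with an attribute-first lookup: a node that advertises a config-version node attribute is taken at its word, and only attribute-less (pre-8.11) nodes fall back to the legacy release-version id. A minimal standalone sketch of that pattern, assuming the "ml.config_version" attribute key; Node and configVersionIdForNode are hypothetical stand-ins for the real DiscoveryNode API:

    import java.util.Map;
    import java.util.Optional;

    // Illustrative sketch only; not the real MlConfigVersion/DiscoveryNode classes.
    final class ConfigVersionLookup {
        static final String ML_CONFIG_VERSION_NODE_ATTR = "ml.config_version";

        record Node(Map<String, String> attributes, Optional<Integer> pre811VersionId) {}

        static int configVersionIdForNode(Node node) {
            String attr = node.attributes().get(ML_CONFIG_VERSION_NODE_ATTR);
            if (attr != null) {
                // Nodes from 8.11 on advertise their config version explicitly;
                // the real code parses the attribute with fromString(attr).
                return Integer.parseInt(attr);
            }
            // Attribute-less nodes must predate 8.11, so their release version id
            // doubles as the config version id; having neither is a hard error.
            return node.pre811VersionId()
                .orElseThrow(() -> new IllegalStateException("getting legacy version id not possible"));
        }

        public static void main(String[] args) {
            Node newNode = new Node(Map.of(ML_CONFIG_VERSION_NODE_ATTR, "10000099"), Optional.empty());
            Node oldNode = new Node(Map.of(), Optional.of(8_07_00_99));
            System.out.println(configVersionIdForNode(newNode)); // 10000099
            System.out.println(configVersionIdForNode(oldNode)); // 8070099
        }
    }

Ordering the attribute check first means the node attribute, once present, is always authoritative, so the legacy path is exercised only for genuinely old nodes.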
@@ -329,12 +318,17 @@ public static MlConfigVersion fromString(String str) { if (str.startsWith("8.10.") || str.equals("8.11.0")) { return V_10; } - Matcher matcher = Pattern.compile("^(\\d+)\\.0\\.0$").matcher(str); - int versionNum; - if (matcher.matches() == false || (versionNum = Integer.parseInt(matcher.group(1))) < 10) { - return fromVersion(Version.fromString(str)); + Matcher matcher = Pattern.compile("^(\\d+)\\.(\\d+)\\.(\\d+)(?:-\\w+)?$").matcher(str); + if (matcher.matches() == false) { + throw new IllegalArgumentException("ML config version [" + str + "] not valid"); + } + int first = Integer.parseInt(matcher.group(1)); + int second = Integer.parseInt(matcher.group(2)); + int third = Integer.parseInt(matcher.group(3)); + if (first >= 10 && (second > 0 || third > 0)) { + throw new IllegalArgumentException("ML config version [" + str + "] not valid"); } - return fromId(1000000 * versionNum + 99); + return fromId(1000000 * first + 10000 * second + 100 * third + 99); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java index 3d6a1aef8477a..ef65f4bca1c35 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/TransformConfigVersion.java @@ -302,17 +302,6 @@ public static TransformConfigVersion max(TransformConfigVersion version1, Transf return version1.id > version2.id ? version1 : version2; } - // Visible only for testing - static TransformConfigVersion fromVersion(Version version) { - if (version.equals(Version.V_8_10_0)) { - return V_10; - } - if (version.after(Version.V_8_10_0)) { - throw new IllegalArgumentException("Cannot convert " + version + ". Incompatible version"); - } - return fromId(version.id); - } - public static TransformConfigVersion getMinTransformConfigVersion(DiscoveryNodes nodes) { return getMinMaxTransformConfigVersion(nodes).v1(); } @@ -342,10 +331,10 @@ public static Tuple getMinMaxTra public static TransformConfigVersion getTransformConfigVersionForNode(DiscoveryNode node) { String transformConfigVerStr = node.getAttributes().get(TRANSFORM_CONFIG_VERSION_NODE_ATTR); - if (transformConfigVerStr == null) { - return fromVersion(node.getVersion()); + if (transformConfigVerStr != null) { + return fromString(transformConfigVerStr); } - return fromString(transformConfigVerStr); + return fromId(node.getPre811VersionId().orElseThrow(() -> new IllegalStateException("getting legacy version id not possible"))); } // Parse an TransformConfigVersion from a string. 
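The rewritten fromString above (mirrored for TransformConfigVersion just below) swaps the old X.0.0-only pattern for a full major.minor.patch matcher with an optional qualifier, keeps the legacy MMNNPP id packing with a trailing 99, and rejects a major of 10 or above combined with a nonzero minor or patch, since from major 10 onward the config version is a plain counter. A self-contained sketch of just that parsing step; the 8.10.x/8.11.0 special cases that the real methods handle first are omitted, and the class name is illustrative:

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    // Standalone sketch of the version-string parsing shown in the hunks above.
    final class ConfigVersionParser {
        private static final Pattern VERSION = Pattern.compile("^(\\d+)\\.(\\d+)\\.(\\d+)(?:-\\w+)?$");

        static int toId(String str) {
            Matcher m = VERSION.matcher(str);
            if (m.matches() == false) {
                throw new IllegalArgumentException("config version [" + str + "] not valid");
            }
            int major = Integer.parseInt(m.group(1));
            int minor = Integer.parseInt(m.group(2));
            int patch = Integer.parseInt(m.group(3));
            // From major 10 onward only X.0.0 is legal: the version is a counter.
            if (major >= 10 && (minor > 0 || patch > 0)) {
                throw new IllegalArgumentException("config version [" + str + "] not valid");
            }
            // Legacy MMNNPP packing with a trailing 99, e.g. "8.7.0" -> 8_07_00_99.
            return 1_000_000 * major + 10_000 * minor + 100 * patch + 99;
        }

        public static void main(String[] args) {
            System.out.println(toId("8.7.0"));   // 8070099
            System.out.println(toId("10.0.0"));  // 10000099
        }
    }

Packing counter-style versions into the legacy id space keeps them totally ordered against ids recorded by older nodes, which is why both config-version classes share this encoding.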
@@ -358,12 +347,17 @@ public static TransformConfigVersion fromString(String str) { if (str.equals("8.10.0")) { return V_10; } - Matcher matcher = Pattern.compile("^(\\d+)\\.0\\.0$").matcher(str); - int versionNum; - if (matcher.matches() == false || (versionNum = Integer.parseInt(matcher.group(1))) < 10) { - return fromVersion(Version.fromString(str)); + Matcher matcher = Pattern.compile("^(\\d+)\\.(\\d+)\\.(\\d+)(?:-\\w+)?$").matcher(str); + if (matcher.matches() == false) { + throw new IllegalArgumentException("Transform config version [" + str + "] not valid"); + } + int first = Integer.parseInt(matcher.group(1)); + int second = Integer.parseInt(matcher.group(2)); + int third = Integer.parseInt(matcher.group(3)); + if (first >= 10 && (second > 0 || third > 0)) { + throw new IllegalArgumentException("Transform config version [" + str + "] not valid"); } - return fromId(1000000 * versionNum + 99); + return fromId(1000000 * first + 10000 * second + 100 * third + 99); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java index 1829e82758d3f..7ac27d79d3cb8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.transform.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; @@ -16,6 +17,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; import java.util.Arrays; @@ -24,6 +28,8 @@ import java.util.Map.Entry; import java.util.Objects; +import static org.elasticsearch.core.Strings.format; + /** * Transform internal API (no REST layer) to retrieve index checkpoints. */ @@ -42,16 +48,23 @@ public static class Request extends ActionRequest implements IndicesRequest.Repl private String[] indices; private final IndicesOptions indicesOptions; + private final TimeValue timeout; public Request(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_CHECKPOINT_TIMEOUT_ADDED)) { + timeout = in.readOptionalTimeValue(); + } else { + timeout = null; + } } - public Request(String[] indices, IndicesOptions indicesOptions) { + public Request(String[] indices, IndicesOptions indicesOptions, TimeValue timeout) { this.indices = indices != null ? 
indices : Strings.EMPTY_ARRAY; this.indicesOptions = indicesOptions; + this.timeout = timeout; } @Override @@ -69,6 +82,10 @@ public IndicesOptions indicesOptions() { return indicesOptions; } + public TimeValue getTimeout() { + return timeout; + } + @Override public boolean equals(Object obj) { if (obj == this) { @@ -79,12 +96,14 @@ } Request that = (Request) obj; - return Arrays.equals(indices, that.indices) && Objects.equals(indicesOptions, that.indicesOptions); + return Arrays.equals(indices, that.indices) + && Objects.equals(indicesOptions, that.indicesOptions) + && Objects.equals(timeout, that.timeout); } @Override public int hashCode() { - return Objects.hash(Arrays.hashCode(indices), indicesOptions); + return Objects.hash(Arrays.hashCode(indices), indicesOptions, timeout); } @Override @@ -92,6 +111,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_CHECKPOINT_TIMEOUT_ADDED)) { + out.writeOptionalTimeValue(timeout); + } } @Override @@ -105,6 +127,11 @@ public IndicesRequest indices(String... indices) { public boolean allowsRemoteIndices() { return false; } + + @Override + public CancellableTask createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { + return new CancellableTask(id, type, action, format("get_checkpoint[%d]", indices.length), parentTaskId, headers); + } } public static class Response extends ActionResponse { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java index ebc34211040c9..8e67dbc6daacd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.transform.action; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; @@ -16,7 +17,10 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import java.io.IOException; import java.util.Arrays; @@ -25,6 +29,8 @@ import java.util.Objects; import java.util.Set; +import static org.elasticsearch.core.Strings.format; + public class GetCheckpointNodeAction extends ActionType<GetCheckpointNodeAction.Response> { public static final GetCheckpointNodeAction INSTANCE = new GetCheckpointNodeAction(); @@ -86,16 +92,23 @@ public static class Request extends ActionRequest implements IndicesRequest { private final Set<ShardId> shards; private final OriginalIndices originalIndices; + private final TimeValue timeout; - public Request(Set<ShardId> shards, OriginalIndices originalIndices) { + public Request(Set<ShardId> shards, OriginalIndices originalIndices, TimeValue timeout) { this.shards = shards; this.originalIndices = originalIndices; + this.timeout = timeout; } public Request(StreamInput in) throws IOException {
super(in); this.shards = in.readCollectionAsImmutableSet(ShardId::new); this.originalIndices = OriginalIndices.readOriginalIndices(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_CHECKPOINT_TIMEOUT_ADDED)) { + this.timeout = in.readOptionalTimeValue(); + } else { + this.timeout = null; + } } @Override @@ -108,6 +121,9 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeCollection(shards); OriginalIndices.writeOriginalIndices(originalIndices, out); + if (out.getTransportVersion().onOrAfter(TransportVersions.TRANSFORM_GET_CHECKPOINT_TIMEOUT_ADDED)) { + out.writeOptionalTimeValue(timeout); + } } public Set<ShardId> getShards() { @@ -118,6 +134,10 @@ public OriginalIndices getOriginalIndices() { return originalIndices; } + public TimeValue getTimeout() { + return timeout; + } + @Override public boolean equals(Object obj) { if (obj == this) { @@ -128,12 +148,14 @@ } Request that = (Request) obj; - return Objects.equals(shards, that.shards) && Objects.equals(originalIndices, that.originalIndices); + return Objects.equals(shards, that.shards) + && Objects.equals(originalIndices, that.originalIndices) + && Objects.equals(timeout, that.timeout); } @Override public int hashCode() { - return Objects.hash(shards, originalIndices); + return Objects.hash(shards, originalIndices, timeout); } @Override @@ -146,5 +168,16 @@ public IndicesOptions indicesOptions() { return originalIndices.indicesOptions(); } + @Override + public CancellableTask createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { + return new CancellableTask( + id, + type, + action, + format("get_checkpoint_node[%d;%d]", indices() != null ? indices().length : 0, shards != null ?
shards.size() : 0), + parentTaskId, + headers + ); + } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java index ad9ffe61df928..fc257258eb213 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/AbstractClusterStateLicenseServiceTestCase.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -72,7 +73,8 @@ protected void setInitialState(License license, XPackLicenseState licenseState, when(state.metadata()).thenReturn(metadata); final DiscoveryNode mockNode = getLocalNode(); when(discoveryNodes.getMasterNode()).thenReturn(mockNode); - when(discoveryNodes.stream()).thenAnswer(invocation -> Stream.of(mockNode)); + when(discoveryNodes.stream()).thenAnswer(i -> Stream.of(mockNode)); + when(discoveryNodes.iterator()).thenAnswer(i -> Iterators.single(mockNode)); when(discoveryNodes.isLocalNodeElectedMaster()).thenReturn(false); when(discoveryNodes.getMinNodeVersion()).thenReturn(mockNode.getVersion()); when(state.nodes()).thenReturn(discoveryNodes); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseClusterChangeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseClusterChangeTests.java index 3e52fcfce2bc0..5eba6ff46d093 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseClusterChangeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseClusterChangeTests.java @@ -65,7 +65,7 @@ public void testNoNotificationOnExistingLicense() throws Exception { public void testSelfGeneratedLicenseGeneration() throws Exception { DiscoveryNode master = DiscoveryNodeUtils.builder("b").roles(emptySet()).build(); ClusterState oldState = ClusterState.builder(new ClusterName("a")) - .nodes(DiscoveryNodes.builder().masterNodeId(master.getId()).add(master)) + .nodes(DiscoveryNodes.builder().masterNodeId(master.getId()).localNodeId(master.getId()).add(master)) .build(); when(discoveryNodes.isLocalNodeElectedMaster()).thenReturn(true); ClusterState newState = ClusterState.builder(oldState).nodes(discoveryNodes).build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index de7b35ae0c78d..462e1942018ee 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.IOUtils; import 
org.elasticsearch.env.Environment; @@ -364,7 +365,11 @@ public void onFailure(Exception e) { ); IndexMetadata metadata = runAsSnapshot( threadPool, - () -> repository.getSnapshotIndexMetaData(PlainActionFuture.get(repository::getRepositoryData), snapshotId, indexId) + () -> repository.getSnapshotIndexMetaData( + PlainActionFuture.get(listener -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, listener)), + snapshotId, + indexId + ) ); IndexShard restoredShard = newShard( shardRouting, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index afc64140004c7..26436e497a644 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -25,7 +25,6 @@ import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; @@ -44,7 +43,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexModule; @@ -54,7 +52,6 @@ import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.mapper.MetadataFieldMapper; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -81,22 +78,18 @@ import org.elasticsearch.plugins.ShutdownAwarePlugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.plugins.interceptor.RestServerActionPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestHeaderDefinition; import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.snapshots.Snapshot; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportInterceptor; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -193,61 +186,10 @@ protected void setEpochMillisSupplier(LongSupplier epochMillisSupplier) { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool 
threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - List components = new ArrayList<>( - super.createComponents( - client, - clusterService, - threadPool, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment, - namedWriteableRegistry, - expressionResolver, - repositoriesServiceSupplier, - telemetryProvider, - allocationService, - indicesService - ) - ); + public Collection createComponents(PluginServices services) { + List components = new ArrayList<>(super.createComponents(services)); - filterPlugins(Plugin.class).forEach( - p -> components.addAll( - p.createComponents( - client, - clusterService, - threadPool, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment, - namedWriteableRegistry, - expressionResolver, - repositoriesServiceSupplier, - telemetryProvider, - allocationService, - indicesService - ) - ) - ); + filterPlugins(Plugin.class).forEach(p -> components.addAll(p.createComponents(services))); return components; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java index 23633138d570d..d4c8dfa5fd0a7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/XPackPluginTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; @@ -29,6 +28,7 @@ import org.elasticsearch.license.PutLicenseRequest; import org.elasticsearch.license.internal.MutableLicenseService; import org.elasticsearch.plugins.ExtensiblePlugin; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.protocol.xpack.license.PutLicenseResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -74,9 +74,6 @@ public void setup() { public void testXPackInstalledAttrClash() throws Exception { Settings.Builder builder = Settings.builder(); builder.put("node.attr." 
+ XPackPlugin.XPACK_INSTALLED_NODE_ATTR, randomBoolean()); - if (randomBoolean()) { - builder.put(Client.CLIENT_TYPE_SETTING_S.getKey(), "transport"); - } XPackPlugin xpackPlugin = createXPackPlugin(builder.put("path.home", createTempDir()).build()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, xpackPlugin::additionalSettings); assertThat( @@ -147,22 +144,11 @@ public List loadExtensions(Class extensionPointType) { when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); // ensure createComponents does not influence the results - xpackPlugin.createComponents( - null, - mock(ClusterService.class), - mock(ThreadPool.class), - null, - null, - null, - mockEnvironment, - null, - null, - null, - null, - null, - null, - null - ); + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.clusterService()).thenReturn(mock(ClusterService.class)); + when(services.threadPool()).thenReturn(mock(ThreadPool.class)); + when(services.environment()).thenReturn(mockEnvironment); + xpackPlugin.createComponents(services); assertEquals(license, XPackPlugin.getSharedLicenseService().getLicense()); assertEquals(License.OperationMode.resolve(licenseType), XPackPlugin.getSharedLicenseState().getOperationMode()); } @@ -201,22 +187,11 @@ public List loadExtensions(Class extensionPointType) { Environment mockEnvironment = mock(Environment.class); when(mockEnvironment.settings()).thenReturn(Settings.builder().build()); when(mockEnvironment.configFile()).thenReturn(PathUtils.get("")); - xpackPlugin.createComponents( - null, - mock(ClusterService.class), - mock(ThreadPool.class), - null, - null, - null, - mockEnvironment, - null, - null, - null, - null, - null, - null, - null - ); + Plugin.PluginServices services = mock(Plugin.PluginServices.class); + when(services.clusterService()).thenReturn(mock(ClusterService.class)); + when(services.threadPool()).thenReturn(mock(ThreadPool.class)); + when(services.environment()).thenReturn(mockEnvironment); + xpackPlugin.createComponents(services); assertThat(XPackPlugin.getSharedLicenseService(), instanceOf(ClusterStateLicenseService.class)); assertEquals(License.OperationMode.TRIAL, XPackPlugin.getSharedLicenseState().getOperationMode()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java index d43295b2fe543..fc35a4b4761bd 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; @@ -36,6 +35,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.is; // TODO: test CRUD operations @@ -224,8 +224,7 @@ public void testAutoCreateIndex() throws Exception { } // Delete the index, so we can test subsequent 
auto-create behaviour - AcknowledgedResponse ack = client().admin().indices().prepareDelete(index).get(); - assertTrue(ack.isAcknowledged()); + assertAcked(client().admin().indices().prepareDelete(index)); // Subsequent response deletes throw a (wrapped) index not found exception { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java index 0592be1652c88..78dcaa1be92af 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/SetSingleNodeAllocateStepTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.Node; import org.elasticsearch.test.VersionUtils; @@ -402,7 +403,7 @@ public void testPerformActionSomeShardsOnlyOnNewNodes() throws Exception { Version.fromId(Version.CURRENT.major * 1_000_000 + 99), VersionUtils.getPreviousVersion() ), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); final int numNodes = randomIntBetween(2, 20); // Need at least 2 nodes to have some nodes on a new version @@ -471,7 +472,7 @@ public void testPerformActionSomeShardsOnlyOnNewNodesButNewNodesInvalidAttrs() { Version.fromId(Version.CURRENT.major * 1_000_000 + 99), VersionUtils.getPreviousVersion() ), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); final int numNodes = randomIntBetween(2, 20); // Need at least 2 nodes to have some nodes on a new version @@ -548,7 +549,7 @@ public void testPerformActionNewShardsExistButWithInvalidAttributes() throws Exc Version.fromId(Version.CURRENT.major * 1_000_000 + 99), VersionUtils.getPreviousVersion() ), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersionUtils.randomCompatibleVersion(random()) ); final int numNodes = randomIntBetween(2, 20); // Need at least 2 nodes to have some nodes on a new version diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java index 75eee620c031c..f97d9e1f21d07 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/MlConfigVersionTests.java @@ -167,7 +167,7 @@ public void testGetMlConfigVersionForNode() { .version(VersionInformation.inferVersions(Version.fromString("8.7.0"))) .build(); MlConfigVersion mlConfigVersion1 = MlConfigVersion.getMlConfigVersionForNode(node1); - assertEquals(MlConfigVersion.fromVersion(Version.V_8_5_0), mlConfigVersion1); + assertEquals(MlConfigVersion.V_8_5_0, mlConfigVersion1); } public void testDefinedConstants() throws IllegalAccessException { @@ -232,19 +232,6 @@ public void testMax() { ); } - public void testFromVersion() { - Version version_V_7_7_0 = Version.V_7_0_0; - MlConfigVersion mlConfigVersion_V_7_7_0 = MlConfigVersion.fromVersion(version_V_7_7_0); - assertEquals(version_V_7_7_0.id, mlConfigVersion_V_7_7_0.id()); - - // Version 8.10.0 is 
treated as if it is MlConfigVersion V_10. - assertEquals(MlConfigVersion.V_10.id(), MlConfigVersion.fromVersion(Version.V_8_10_0).id()); - - // There's no mapping between Version and MlConfigVersion values after Version.V_8_10_0. - Exception e = expectThrows(IllegalArgumentException.class, () -> MlConfigVersion.fromVersion(Version.fromId(8_11_00_99))); - assertEquals("Cannot convert " + Version.fromId(8_11_00_99) + ". Incompatible version", e.getMessage()); - } - public void testVersionConstantPresent() { Set ignore = Set.of(MlConfigVersion.ZERO, MlConfigVersion.CURRENT, MlConfigVersion.FIRST_ML_VERSION); assertThat(MlConfigVersion.CURRENT, sameInstance(MlConfigVersion.fromId(MlConfigVersion.CURRENT.id()))); @@ -298,13 +285,9 @@ public void testFromString() { assertEquals(false, KnownMlConfigVersions.ALL_VERSIONS.contains(unknownVersion)); assertEquals(MlConfigVersion.CURRENT.id() + 1, unknownVersion.id()); - for (String version : new String[] { "10.2", "7.17.2.99" }) { + for (String version : new String[] { "10.2", "7.17.2.99", "9" }) { Exception e = expectThrows(IllegalArgumentException.class, () -> MlConfigVersion.fromString(version)); - assertEquals("the version needs to contain major, minor, and revision, and optionally the build: " + version, e.getMessage()); + assertEquals("ML config version [" + version + "] not valid", e.getMessage()); } - - String version = "9"; - Exception e = expectThrows(IllegalArgumentException.class, () -> MlConfigVersion.fromString(version)); - assertEquals("the version needs to contain major, minor, and revision, and optionally the build: " + version, e.getMessage()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java index 8b83b9dfd3bff..b42056372b1ab 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/TransformConfigVersionTests.java @@ -172,7 +172,7 @@ public void testGetTransformConfigVersionForNode() { .version(VersionInformation.inferVersions(Version.fromString("8.7.0"))) .build(); TransformConfigVersion TransformConfigVersion1 = TransformConfigVersion.getTransformConfigVersionForNode(node1); - assertEquals(TransformConfigVersion.fromVersion(Version.V_8_5_0), TransformConfigVersion1); + assertEquals(TransformConfigVersion.V_8_5_0, TransformConfigVersion1); } public void testDefinedConstants() throws IllegalAccessException { @@ -246,19 +246,6 @@ public void testMax() { ); } - public void testFromVersion() { - Version version_V_7_7_0 = Version.V_7_0_0; - TransformConfigVersion TransformConfigVersion_V_7_7_0 = TransformConfigVersion.fromVersion(version_V_7_7_0); - assertEquals(version_V_7_7_0.id, TransformConfigVersion_V_7_7_0.id()); - - // Version 8.10.0 is treated as if it is TransformConfigVersion V_10. - assertEquals(TransformConfigVersion.V_10.id(), TransformConfigVersion.fromVersion(Version.V_8_10_0).id()); - - // There's no mapping between Version and TransformConfigVersion values after Version.V_8_10_0. - Exception e = expectThrows(IllegalArgumentException.class, () -> TransformConfigVersion.fromVersion(Version.fromId(8_11_00_99))); - assertEquals("Cannot convert " + Version.fromId(8_11_00_99) + ". 
Incompatible version", e.getMessage()); - } - public void testVersionConstantPresent() { Set ignore = Set.of( TransformConfigVersion.ZERO, @@ -316,13 +303,9 @@ public void testFromString() { assertEquals(false, KnownTransformConfigVersions.ALL_VERSIONS.contains(unknownVersion)); assertEquals(TransformConfigVersion.CURRENT.id() + 1, unknownVersion.id()); - for (String version : new String[] { "10.2", "7.17.2.99" }) { + for (String version : new String[] { "10.2", "7.17.2.99", "9" }) { Exception e = expectThrows(IllegalArgumentException.class, () -> TransformConfigVersion.fromString(version)); - assertEquals("the version needs to contain major, minor, and revision, and optionally the build: " + version, e.getMessage()); + assertEquals("Transform config version [" + version + "] not valid", e.getMessage()); } - - String version = "9"; - Exception e = expectThrows(IllegalArgumentException.class, () -> TransformConfigVersion.fromString(version)); - assertEquals("the version needs to contain major, minor, and revision, and optionally the build: " + version, e.getMessage()); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java index 0bda09772da64..43ec0a0f1b4f5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java @@ -10,6 +10,9 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; @@ -17,21 +20,17 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; +import java.util.Map; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class GetCheckpointActionRequestTests extends AbstractWireSerializingTestCase { @Override protected Request createTestInstance() { - return new Request( - randomBoolean() ? null : generateRandomStringArray(10, 10, false, false), - IndicesOptions.fromParameters( - randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), - Boolean.toString(randomBoolean()), - Boolean.toString(randomBoolean()), - Boolean.toString(randomBoolean()), - SearchRequest.DEFAULT_INDICES_OPTIONS - ) - ); + return randomRequest(randomBoolean() ? 10 : null); } @Override @@ -43,8 +42,9 @@ protected Reader instanceReader() { protected Request mutateInstance(Request instance) { List indices = instance.indices() != null ? new ArrayList<>(Arrays.asList(instance.indices())) : new ArrayList<>(); IndicesOptions indicesOptions = instance.indicesOptions(); + TimeValue timeout = instance.getTimeout(); - switch (between(0, 1)) { + switch (between(0, 2)) { case 0: indices.add(randomAlphaOfLengthBetween(1, 20)); break; @@ -57,10 +57,39 @@ protected Request mutateInstance(Request instance) { SearchRequest.DEFAULT_INDICES_OPTIONS ); break; + case 2: + timeout = timeout != null ? 
null : TimeValue.timeValueSeconds(randomIntBetween(1, 300)); + break; default: throw new AssertionError("Illegal randomization branch"); } - return new Request(indices.toArray(new String[0]), indicesOptions); + return new Request(indices.toArray(new String[0]), indicesOptions, timeout); + } + + public void testCreateTask() { + Request request = randomRequest(17); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint[17]"))); + } + + public void testCreateTaskWithNullIndices() { + Request request = new Request(null, null, null); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint[0]"))); + } + + private static Request randomRequest(Integer numIndices) { + return new Request( + numIndices != null ? Stream.generate(() -> randomAlphaOfLength(10)).limit(numIndices).toArray(String[]::new) : null, + IndicesOptions.fromParameters( + randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + SearchRequest.DEFAULT_INDICES_OPTIONS + ), + randomBoolean() ? TimeValue.timeValueSeconds(randomIntBetween(1, 300)) : null + ); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java index ac5d1e3859445..b2cb2ae68f113 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java @@ -10,13 +10,20 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request; import java.util.HashSet; +import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + public class GetCheckpointNodeActionRequestTests extends AbstractWireSerializingTestCase { @Override @@ -26,21 +33,17 @@ protected Reader instanceReader() { @Override protected Request createTestInstance() { - Set shards = new HashSet<>(); - OriginalIndices originalIndices = randomOriginalIndices(randomIntBetween(0, 20)); - int numberOfRandomShardIds = randomInt(10); - - for (int i = 0; i < numberOfRandomShardIds; ++i) { - shards.add(new ShardId(randomAlphaOfLength(4) + i, randomAlphaOfLength(4), randomInt(5))); - } - - return new Request(shards, originalIndices); + return new Request( + randomShards(randomInt(10)), + randomOriginalIndices(randomIntBetween(0, 20)), + randomBoolean() ? 
randomTimeout() : null + ); } @Override protected Request mutateInstance(Request instance) { - switch (random().nextInt(1)) { + switch (random().nextInt(2)) { case 0 -> { Set shards = new HashSet<>(instance.getShards()); if (randomBoolean() && shards.size() > 0) { @@ -52,17 +55,56 @@ protected Request mutateInstance(Request instance) { } else { shards.add(new ShardId(randomAlphaOfLength(8), randomAlphaOfLength(4), randomInt(5))); } - return new Request(shards, instance.getOriginalIndices()); + return new Request(shards, instance.getOriginalIndices(), instance.getTimeout()); } case 1 -> { OriginalIndices originalIndices = randomOriginalIndices(instance.indices().length + 1); - return new Request(instance.getShards(), originalIndices); + return new Request(instance.getShards(), originalIndices, instance.getTimeout()); + } + case 2 -> { + return new Request( + instance.getShards(), + instance.getOriginalIndices(), + instance.getTimeout() != null ? null : randomTimeout() + ); } default -> throw new IllegalStateException("The test should only allow 1 parameters mutated"); } } - private OriginalIndices randomOriginalIndices(int numIndices) { + public void testCreateTask() { + Request request = new Request(randomShards(7), randomOriginalIndices(19), null); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint_node[19;7]"))); + } + + public void testCreateTaskWithNullShardsAndIndices() { + Request request = new Request(null, OriginalIndices.NONE, null); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint_node[0;0]"))); + } + + public void testCreateTaskWithNullShards() { + Request request = new Request(null, randomOriginalIndices(13), null); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint_node[13;0]"))); + } + + public void testCreateTaskWithNullIndices() { + Request request = new Request(randomShards(11), OriginalIndices.NONE, null); + CancellableTask task = request.createTask(123, "type", "action", new TaskId("dummy-node:456"), Map.of()); + assertThat(task.getDescription(), is(equalTo("get_checkpoint_node[0;11]"))); + } + + private static Set randomShards(int numShards) { + Set shards = new HashSet<>(); + for (int i = 0; i < numShards; ++i) { + shards.add(new ShardId(randomAlphaOfLength(4) + i, randomAlphaOfLength(4), randomInt(5))); + } + return shards; + } + + private static OriginalIndices randomOriginalIndices(int numIndices) { String[] randomIndices = new String[numIndices]; for (int i = 0; i < numIndices; i++) { randomIndices[i] = randomAlphaOfLengthBetween(5, 10); @@ -71,4 +113,7 @@ private OriginalIndices randomOriginalIndices(int numIndices) { return new OriginalIndices(randomIndices, indicesOptions); } + private static TimeValue randomTimeout() { + return TimeValue.timeValueSeconds(randomIntBetween(1, 300)); + } } diff --git a/x-pack/plugin/core/template-resources/src/main/resources/180-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/180-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/180-days@lifecycle.json diff --git 
a/x-pack/plugin/core/template-resources/src/main/resources/30-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/30-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/30-days@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/365-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/365-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/365-days@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/7-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/7-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/7-days@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/90-days-default.json b/x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/90-days-default.json rename to x-pack/plugin/core/template-resources/src/main/resources/90-days@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/data-streams-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/data-streams@mappings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ecs-dynamic-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/ecs-dynamic-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/ecs@mappings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting-template.json b/x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/kibana-reporting@template.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-default-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-default-pipeline.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@default-pipeline.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-json-message-pipeline.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-json-message-pipeline.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@json-pipeline.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-policy.json 
b/x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@mappings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json similarity index 90% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json index 250d254899204..cc61f195402fe 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs-settings.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@settings.json @@ -12,7 +12,7 @@ "mapping": { "ignore_malformed": true }, - "default_pipeline": "logs-default-pipeline" + "default_pipeline": "logs@default-pipeline" } } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/logs-template.json b/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json similarity index 83% rename from x-pack/plugin/core/template-resources/src/main/resources/logs-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/logs@template.json index f232f00a8674f..b41b2d0453c89 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/logs-template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/logs@template.json @@ -3,10 +3,10 @@ "priority": 100, "data_stream": {}, "composed_of": [ - "logs-mappings", - "logs-settings", + "logs@mappings", + "logs@settings", "logs@custom", - "ecs@dynamic_templates" + "ecs@mappings" ], "ignore_missing_component_templates": ["logs@custom"], "allow_auto_create": true, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/metrics-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/metrics-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics@mappings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/metrics-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics@settings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics-template.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json similarity index 78% rename from 
x-pack/plugin/core/template-resources/src/main/resources/metrics-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json index b0c4308722912..a596314bc9e8c 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/metrics-template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/metrics@template.json @@ -3,9 +3,9 @@ "priority": 100, "data_stream": {}, "composed_of": [ - "metrics-mappings", - "data-streams-mappings", - "metrics-settings" + "metrics@mappings", + "data-streams@mappings", + "metrics@settings" ], "allow_auto_create": true, "_meta": { diff --git a/x-pack/plugin/core/template-resources/src/main/resources/metrics-tsdb-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/metrics-tsdb-settings.json rename to x-pack/plugin/core/template-resources/src/main/resources/metrics@tsdb-settings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-costs.json b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-costs.json new file mode 100644 index 0000000000000..7f54b012f8803 --- /dev/null +++ b/x-pack/plugin/core/template-resources/src/main/resources/profiling/index-template/profiling-costs.json @@ -0,0 +1,60 @@ +{ + "index_patterns": [ + ".profiling-costs*" + ], + "template": { + "settings": { + "index": { + "number_of_replicas": 0, + "auto_expand_replicas": "0-1", + "refresh_interval": "30s", + "hidden": true + } + }, + "mappings": { + "_source": { + "mode": "synthetic" + }, + "_meta": { + "index-template-version": ${xpack.profiling.template.version}, + "index-version": ${xpack.profiling.index.costs.version} + }, + "dynamic": false, + "properties": { + "ecs.version": { + "type": "keyword", + "index": true + }, + "@timestamp": { // creation date + "type": "date", + "index": true + }, + "provider": { + "type": "keyword", + "index": true + }, + "region": { + "type": "keyword", + "index": true + }, + "instance_type": { + "type": "keyword", + "index": true + }, + "co2_factor": { + "type": "double", + "index": false + }, + "cost_factor": { + "type": "double", + "index": false + } + } + } + }, + "priority": 100, + "_meta": { + "description": "Index template for .profiling-costs" + }, + "version": ${xpack.profiling.template.version} +} diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/synthetics-policy.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics@lifecycle.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics-mappings.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/synthetics-mappings.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics@mappings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics-settings.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json similarity index 100% rename from x-pack/plugin/core/template-resources/src/main/resources/synthetics-settings.json rename 
to x-pack/plugin/core/template-resources/src/main/resources/synthetics@settings.json diff --git a/x-pack/plugin/core/template-resources/src/main/resources/synthetics-template.json b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json similarity index 77% rename from x-pack/plugin/core/template-resources/src/main/resources/synthetics-template.json rename to x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json index 0e292f3d8694f..6369bd5a82c15 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/synthetics-template.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/synthetics@template.json @@ -3,9 +3,9 @@ "priority": 100, "data_stream": {}, "composed_of": [ - "synthetics-mappings", - "data-streams-mappings", - "synthetics-settings" + "synthetics@mappings", + "data-streams@mappings", + "synthetics@settings" ], "allow_auto_create": true, "_meta": { diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java index 9d5ea622568c9..39cf434685b27 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/DeprecationHttpIT.java @@ -36,6 +36,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import java.util.Map; @@ -48,6 +49,7 @@ import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.hasEntry; @@ -108,8 +110,13 @@ private void resetDeprecationIndexAndCache() throws Exception { } catch (Exception e) { throw new AssertionError(e); } - }, 30, TimeUnit.SECONDS); + + assertBusy(() -> { + // wait for the data stream to really be deleted + var response = ESRestTestCase.entityAsMap(client().performRequest(new Request("GET", "/_data_stream"))); + assertThat((Collection) response.get("data_streams"), empty()); + }); } /** @@ -562,7 +569,6 @@ public void testDeprecationWarnMessagesCanBeIndexed() throws Exception { /** * Check that log messages about REST API compatibility are recorded to an index */ - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/96723") public void testCompatibleMessagesCanBeIndexed() throws Exception { final Request compatibleRequest = new Request("GET", "/_test_cluster/compat_only"); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java index 349ff1042d3a9..dd060653a4f34 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/Deprecation.java @@ -8,31 +8,18 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import 
org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.RateLimitingFilter; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.deprecation.logging.DeprecationCacheResetAction; import org.elasticsearch.xpack.deprecation.logging.DeprecationIndexingComponent; import org.elasticsearch.xpack.deprecation.logging.DeprecationIndexingTemplateRegistry; @@ -88,43 +75,29 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { final DeprecationIndexingTemplateRegistry templateRegistry = new DeprecationIndexingTemplateRegistry( - environment.settings(), - clusterService, - threadPool, - client, - xContentRegistry + services.environment().settings(), + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() ); templateRegistry.initialize(); final RateLimitingFilter rateLimitingFilterForIndexing = new RateLimitingFilter(); // enable on start. 
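The hunks above migrate the Deprecation plugin from the long fourteen-argument `createComponents` overload to the consolidated `createComponents(PluginServices services)` signature, with each former parameter reached through an accessor on `services`. A minimal sketch of the new shape, using only accessors that appear in this diff (the plugin and component names are hypothetical):

```java
import java.util.Collection;
import java.util.List;

import org.elasticsearch.client.internal.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;

public class ExamplePlugin extends Plugin {

    // Illustrative component; in this diff the components are template
    // registries, executors, and rate-limiting filters.
    record ExampleComponent(Client client, ClusterService clusterService, ThreadPool threadPool, Settings settings) {}

    @Override
    public Collection<?> createComponents(PluginServices services) {
        ExampleComponent component = new ExampleComponent(
            services.client(),
            services.clusterService(),
            services.threadPool(),
            services.environment().settings()
        );
        return List.of(component);
    }
}
```

Plugins that only need a couple of services, such as `EqlPlugin` further down, simply unpack the accessors they use and ignore the rest, which is what made it possible to delete the long runs of now-unused imports in every one of these files.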
- rateLimitingFilterForIndexing.setUseXOpaqueId(USE_X_OPAQUE_ID_IN_FILTERING.get(environment.settings())); - clusterService.getClusterSettings() + rateLimitingFilterForIndexing.setUseXOpaqueId(USE_X_OPAQUE_ID_IN_FILTERING.get(services.environment().settings())); + services.clusterService() + .getClusterSettings() .addSettingsUpdateConsumer(USE_X_OPAQUE_ID_IN_FILTERING, rateLimitingFilterForIndexing::setUseXOpaqueId); final DeprecationIndexingComponent component = DeprecationIndexingComponent.createDeprecationIndexingComponent( - client, - environment.settings(), + services.client(), + services.environment().settings(), rateLimitingFilterForIndexing, - WRITE_DEPRECATION_LOGS_TO_INDEX.get(environment.settings()), // pass the default on startup - clusterService + WRITE_DEPRECATION_LOGS_TO_INDEX.get(services.environment().settings()), // pass the default on startup + services.clusterService() ); return List.of(component, rateLimitingFilterForIndexing); diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index 316f29dde7999..49bdfd58eafd8 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -13,6 +13,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -32,7 +33,7 @@ public class IndexDeprecationChecks { static DeprecationIssue oldIndicesCheck(IndexMetadata indexMetadata) { // TODO: this check needs to be revised. It's trivially true right now. 
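The `addSettingsUpdateConsumer` wiring above is the standard dynamic-setting pattern: read the value once at startup, then register a consumer that re-applies it on every cluster-settings change. A sketch under the assumption of a hypothetical boolean setting that, like `USE_X_OPAQUE_ID_IN_FILTERING`, is dynamic and registered through the plugin's `getSettings()`:

```java
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.RateLimitingFilter;
import org.elasticsearch.common.settings.Setting;

class FilterWiring {

    // Hypothetical dynamic node-scoped setting.
    static final Setting<Boolean> USE_OPAQUE_ID = Setting.boolSetting(
        "xpack.example.use_opaque_id",
        true,
        Setting.Property.NodeScope,
        Setting.Property.Dynamic
    );

    static void wire(ClusterService clusterService, RateLimitingFilter filter) {
        // Apply the startup value, then keep the filter in sync with updates.
        filter.setUseXOpaqueId(USE_OPAQUE_ID.get(clusterService.getSettings()));
        clusterService.getClusterSettings().addSettingsUpdateConsumer(USE_OPAQUE_ID, filter::setUseXOpaqueId);
    }
}
```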
IndexVersion currentCompatibilityVersion = indexMetadata.getCompatibilityVersion(); - if (currentCompatibilityVersion.before(IndexVersion.V_7_0_0)) { + if (currentCompatibilityVersion.before(IndexVersions.V_7_0_0)) { return new DeprecationIssue( DeprecationIssue.Level.CRITICAL, "Old index with a compatibility version < 7.0", diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index f64dd18361d2a..abff1499fceb7 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.engine.frozen.FrozenEngine; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -159,7 +160,7 @@ public void testCamelCaseDeprecation() throws IOException { + "} }"; IndexMetadata simpleIndex = IndexMetadata.builder(randomAlphaOfLengthBetween(5, 10)) - .settings(settings(IndexVersion.V_7_0_0)) + .settings(settings(IndexVersions.V_7_0_0)) .numberOfShards(1) .numberOfReplicas(1) .putMapping(simpleMapping) diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java index cf234e31f1f7c..30fb751d1805c 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java @@ -419,7 +419,6 @@ private void prepareSourceIndex(String sourceIndex) { assertAcked( indicesAdmin().prepareUpdateSettings(sourceIndex) .setSettings(Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), true).build()) - .get() ); } diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java index 89f5cefb401c2..59a0cd34a1db0 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleTransportFailureIT.java @@ -32,7 +32,6 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.transport.MockTransportService; -import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -52,6 +51,8 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 2, 
numClientNodes = 1, supportsDedicatedMasters = false) public class DownsampleTransportFailureIT extends ESIntegTestCase { @@ -104,19 +105,8 @@ public Client masterClient() { return client(this.cluster.getMasterName()); } - public MockTransportService masterMockTransportService() { - return (MockTransportService) internalCluster().getInstance(TransportService.class, internalCluster().getMasterName()); - } - - public MockTransportService coordinatorMockTransportService() { - assert this.coordinator != null; - return (MockTransportService) internalCluster().getInstance(TransportService.class, this.coordinator); - } - public List allMockTransportServices() { - return Arrays.stream(cluster.getNodeNames()) - .map(nodeName -> (MockTransportService) internalCluster().getInstance(TransportService.class, nodeName)) - .collect(Collectors.toList()); + return Arrays.stream(cluster.getNodeNames()).map(MockTransportService::getInstance).toList(); } public String coordinatorName() { @@ -222,11 +212,9 @@ public void indexDocuments(final String indexName, final List documentsJ assertFalse(bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get().hasFailures()); } - public void blockIndexWrites(final String indexName) throws ExecutionException, InterruptedException { + public void blockIndexWrites(final String indexName) { final Settings blockWritesSetting = Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true).build(); - assertTrue( - client().admin().indices().updateSettings(new UpdateSettingsRequest(blockWritesSetting, indexName)).get().isAcknowledged() - ); + assertAcked(client().admin().indices().updateSettings(new UpdateSettingsRequest(blockWritesSetting, indexName))); } private void createTimeSeriesIndex(final String indexName) throws IOException { @@ -301,7 +289,7 @@ public void testNoDisruption() { public void testDownsampleActionExceptionDisruption() { // GIVEN - final MockTransportService coordinator = testCluster.coordinatorMockTransportService(); + final MockTransportService coordinator = MockTransportService.getInstance(testCluster.coordinator); final DownsampleAction.Request downsampleRequest = new DownsampleAction.Request( SOURCE_INDEX_NAME, TARGET_INDEX_NAME, diff --git a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java index 1e0f7d596abda..9bf580673df2e 100644 --- a/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java +++ b/x-pack/plugin/downsample/src/test/java/org/elasticsearch/xpack/downsample/DownsampleActionSingleNodeTests.java @@ -683,7 +683,6 @@ public void testDownsampleBulkFailed() throws IOException { indicesAdmin().preparePutTemplate(downsampleIndex) .setPatterns(List.of(downsampleIndex)) .setSettings(Settings.builder().put("index.blocks.write", "true").build()) - .get() ); ElasticsearchException exception = expectThrows(ElasticsearchException.class, indexer::execute); @@ -1047,14 +1046,12 @@ private void prepareSourceIndex(final String sourceIndex, boolean blockWrite) { assertAcked( indicesAdmin().prepareUpdateSettings(sourceIndex) .setSettings(Settings.builder().put(IndexMetadata.INDEX_BLOCKS_WRITE_SETTING.getKey(), blockWrite).build()) - .get() ); } private void downsample(String sourceIndex, String downsampleIndex, DownsampleConfig config) { assertAcked( client().execute(DownsampleAction.INSTANCE, new 
DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config)) - .actionGet() ); } diff --git a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java index 389ccd380a599..5416741c8743d 100644 --- a/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java +++ b/x-pack/plugin/enrich/src/internalClusterTest/java/org/elasticsearch/xpack/enrich/EnrichMultiNodeIT.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.enrich; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskRequest; @@ -21,7 +20,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; @@ -46,12 +44,14 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.NodeRoles.ingestOnlyNode; import static org.elasticsearch.test.NodeRoles.masterOnlyNode; import static org.elasticsearch.test.NodeRoles.nonIngestNode; import static org.elasticsearch.test.NodeRoles.nonMasterNode; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -85,7 +85,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .build(); } - public void testEnrichAPIs() { + public void testEnrichAPIs() throws ExecutionException, InterruptedException { final int numPolicies = randomIntBetween(2, 4); internalCluster().startNodes(randomIntBetween(2, 3)); int numDocsInSourceIndex = randomIntBetween(8, 32); @@ -111,9 +111,7 @@ public void testEnrichAPIs() { assertThat(result, equalTo(new EnrichPolicy.NamedPolicy(policyName, enrichPolicy))); String enrichIndexPrefix = EnrichPolicy.getBaseName(policyName) + "*"; refresh(enrichIndexPrefix); - SearchResponse searchResponse = client().search(new SearchRequest(enrichIndexPrefix)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocsInSourceIndex)); + assertHitCount(client().search(new SearchRequest(enrichIndexPrefix)), numDocsInSourceIndex); } GetEnrichPolicyAction.Response response = client().execute(GetEnrichPolicyAction.INSTANCE, new GetEnrichPolicyAction.Request()) diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java index 5fb20a883560f..8e0c96c6ee245 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/EnrichPlugin.java @@ -8,13 +8,10 @@ import 
org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -22,22 +19,14 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.ingest.Processor; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.XPackPlugin; @@ -189,44 +178,29 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { EnrichPolicyLocks enrichPolicyLocks = new EnrichPolicyLocks(); EnrichPolicyExecutor enrichPolicyExecutor = new EnrichPolicyExecutor( settings, - clusterService, - indicesService, - client, - threadPool, - expressionResolver, + services.clusterService(), + services.indicesService(), + services.client(), + services.threadPool(), + services.indexNameExpressionResolver(), enrichPolicyLocks, System::currentTimeMillis ); EnrichPolicyMaintenanceService enrichPolicyMaintenanceService = new EnrichPolicyMaintenanceService( settings, - client, - clusterService, - threadPool, + services.client(), + services.clusterService(), + services.threadPool(), enrichPolicyLocks ); enrichPolicyMaintenanceService.initialize(); return List.of( enrichPolicyLocks, - new EnrichCoordinatorProxyAction.Coordinator(client, settings), + new EnrichCoordinatorProxyAction.Coordinator(services.client(), settings), enrichPolicyMaintenanceService, enrichPolicyExecutor, enrichCache diff --git a/x-pack/plugin/ent-search/qa/rest/roles.yml b/x-pack/plugin/ent-search/qa/rest/roles.yml index 
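A recurring cleanup in the test changes above and below: acknowledged requests are handed directly to `ElasticsearchAssertions.assertAcked`, which resolves the request builder itself, so the trailing `.get()`/`.actionGet()` calls go away, and hand-rolled `TotalHits` relation/value checks collapse into `assertHitCount`. A condensed sketch of both idioms (index names and the expected count are illustrative):

```java
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESIntegTestCase;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;

public class HelperIdiomsIT extends ESIntegTestCase {

    public void testAssertionHelpers() {
        // assertAcked executes the builder and checks isAcknowledged().
        assertAcked(
            indicesAdmin().prepareUpdateSettings("example-index")
                .setSettings(Settings.builder().put(IndexMetadata.SETTING_BLOCKS_WRITE, true))
        );
        // assertHitCount replaces manual TotalHits assertions on the response.
        assertHitCount(client().search(new SearchRequest("example-*")), 8);
    }
}
```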
8d2ad43d02d08..4d868f41e78b3 100644 --- a/x-pack/plugin/ent-search/qa/rest/roles.yml +++ b/x-pack/plugin/ent-search/qa/rest/roles.yml @@ -21,7 +21,8 @@ user: "test-index1", "test-search-application", "test-search-application-1", - "test-search-application-with-list" + "test-search-application-with-list", + "test-search-application-with-list-invalid" ] privileges: [ "read" ] diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/52_search_application_render_query.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/52_search_application_render_query.yml index 3885b85ede686..7b4d0f16551d7 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/52_search_application_render_query.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/52_search_application_render_query.yml @@ -42,6 +42,44 @@ setup: field_name: field1 field_value: value1 + - do: + search_application.put: + name: test-search-application-with-list + body: + indices: [ "test-search-index1", "test-search-index2" ] + template: + script: + source: "{ \"query\": { \"multi_match\":{ \"query\": \"{{query_string}}\", \"fields\": [{{#text_fields}}\"{{name}}^{{boost}}\",{{/text_fields}}] } } }" + params: + query_string: "elastic" + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 + lang: "mustache" + + - do: + search_application.put: + name: test-search-application-with-list-invalid + body: + indices: [ "test-search-index1", "test-search-index2" ] + template: + script: + source: "{ \"query\": { \"multi_match\":{ \"query\": \"{{query_string}}\", \"fields\": [{{#text_fields}}\"{{name}}^{{boost}}\"{{/text_fields}}] } } }" + params: + query_string: "elastic" + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 + lang: "mustache" + - do: index: index: test-search-index1 @@ -67,6 +105,16 @@ teardown: name: test-search-application ignore: 404 + - do: + search_application.delete: + name: test-search-application-with-list + ignore: 404 + + - do: + search_application.delete: + name: test-search-application-with-list-invalid + ignore: 404 + - do: indices.delete: index: test-search-index1 @@ -141,6 +189,37 @@ teardown: } } } + +--- +"Render query for search application with a list of parameters": + + - do: + search_application.render_query: + name: test-search-application-with-list + body: + params: + query_string: value3 + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 + + - match: { + query: { + multi_match: { + query: "value3", + fields: [ + "field1^1.0", + "field2^2.0", + "field3^3.0" + ] + } + } + } + --- "Render query for search application - not found": @@ -166,3 +245,20 @@ teardown: body: params: field_value: puggles + +--- +"Render search application query fails on invalid rendered JSON": + - do: + catch: "bad_request" + search_application.render_query: + name: test-search-application-with-list-invalid + body: + params: + query_string: value3 + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/55_search_application_search.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/55_search_application_search.yml index 
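The two search applications registered above differ only in a comma inside the Mustache section: the valid template emits `"{{name}}^{{boost}}",` per list element and the service strips the final trailing comma via `SearchTemplateHelper.stripTrailingComma`, while the invalid one omits the separator, so the rendered array contains adjacent string literals that no JSON parser accepts; the `renderQuery` change later in this diff maps the resulting Jackson error onto an `XContentParseException`, which surfaces as `bad_request`. A standalone Jackson illustration of why the comma-less rendering is rejected (Jackson is already a dependency here, per the `JsonProcessingException` import later in this diff):

```java
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RenderedJsonDemo {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        // With separators (trailing comma already stripped): parses fine.
        mapper.readTree("{\"fields\":[\"field1^1.0\",\"field2^2.0\",\"field3^3.0\"]}");

        // Without separators the rendered strings become adjacent literals.
        try {
            mapper.readTree("{\"fields\":[\"field1^1.0\"\"field2^2.0\"\"field3^3.0\"]}");
        } catch (JsonProcessingException e) {
            // Line/column information like this is what gets converted to an XContentLocation.
            System.out.println("rejected at " + e.getLocation() + ": " + e.getOriginalMessage());
        }
    }
}
```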
7b0f60dd93cb3..42a356038ae68 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/55_search_application_search.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/55_search_application_search.yml @@ -72,6 +72,7 @@ setup: type: string field_value: type: string + - do: search_application.put: name: test-search-application-with-list @@ -91,6 +92,25 @@ setup: boost: 3 lang: "mustache" + - do: + search_application.put: + name: test-search-application-with-list-invalid + body: + indices: [ "test-search-index1", "test-search-index2" ] + template: + script: + source: "{ \"query\": { \"multi_match\":{ \"query\": \"{{query_string}}\", \"fields\": [{{#text_fields}}\"{{name}}^{{boost}}\"{{/text_fields}}] } } }" + params: + query_string: "elastic" + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 + lang: "mustache" + - do: index: index: test-search-index1 @@ -126,6 +146,11 @@ teardown: name: test-search-application-with-list ignore: 404 + - do: + search_application.delete: + name: test-search-application-with-list-invalid + ignore: 404 + - do: indices.delete: index: test-search-index1 @@ -272,3 +297,24 @@ teardown: params: field_value: puggles +--- +"Search application search fails on invalid rendered JSON": + - skip: + features: headers + + - do: + catch: "bad_request" + headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + search_application.search: + name: test-search-application-with-list-invalid + body: + params: + query_string: value3 + text_fields: + - name: field1 + boost: 1 + - name: field2 + boost: 2 + - name: field3 + boost: 3 + diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index ad2c033b0ee0c..cfe6e65b20263 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -9,20 +9,13 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.logging.LogManager; @@ -31,14 +24,8 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.application.analytics.AnalyticsTemplateRegistry; import org.elasticsearch.xpack.application.analytics.action.DeleteAnalyticsCollectionAction; import org.elasticsearch.xpack.application.analytics.action.GetAnalyticsCollectionAction; @@ -197,41 +184,26 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { if (enabled == false) { return Collections.emptyList(); } // Behavioral analytics components final AnalyticsTemplateRegistry analyticsTemplateRegistry = new AnalyticsTemplateRegistry( - clusterService, - threadPool, - client, - xContentRegistry + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() ); analyticsTemplateRegistry.initialize(); // Connector components final ConnectorTemplateRegistry connectorTemplateRegistry = new ConnectorTemplateRegistry( - clusterService, - threadPool, - client, - xContentRegistry + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() ); connectorTemplateRegistry.initialize(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java index 1b776bd993398..d00fa191b49d0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java @@ -19,6 +19,7 @@ import java.util.Arrays; import java.util.List; +import java.util.function.Supplier; import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; @@ -31,16 +32,22 @@ public class BulkProcessorFactory { private final AnalyticsEventIngestConfig config; - private final Client client; + private final Supplier builderSupplier; @Inject public BulkProcessorFactory(Client client, AnalyticsEventIngestConfig config) { - this.client = new OriginSettingClient(client, ENT_SEARCH_ORIGIN); + Client originClient = new OriginSettingClient(client, ENT_SEARCH_ORIGIN); + this.builderSupplier = () -> BulkProcessor2.builder(originClient::bulk, new BulkProcessorListener(), originClient.threadPool()); + this.config = config; + } + + protected BulkProcessorFactory(AnalyticsEventIngestConfig config, Supplier builderSupplier) { + this.builderSupplier = builderSupplier; this.config = config; } public BulkProcessor2 create() { - return BulkProcessor2.builder(client::bulk, new 
BulkProcessorListener(), client.threadPool()) + return builderSupplier.get() .setMaxNumberOfRetries(config.maxNumberOfRetries()) .setBulkActions(config.maxNumberOfEventsPerBulk()) .setFlushInterval(config.flushDelay()) diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java index ccdde4a507756..e2be554f05890 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationTemplateService.java @@ -7,6 +7,9 @@ package org.elasticsearch.xpack.application.search; +import com.fasterxml.jackson.core.JsonLocation; +import com.fasterxml.jackson.core.JsonProcessingException; + import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.logging.LogManager; @@ -17,6 +20,8 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; @@ -38,13 +43,20 @@ public SearchApplicationTemplateService(ScriptService scriptService, NamedXConte } public SearchSourceBuilder renderQuery(SearchApplication searchApplication, Map templateParams) throws IOException, - ValidationException { + ValidationException, XContentParseException { final SearchApplicationTemplate template = searchApplication.searchApplicationTemplateOrDefault(); template.validateTemplateParams(templateParams); final Map renderedTemplateParams = renderTemplateParams(template, templateParams); final Script script = template.script(); TemplateScript compiledTemplate = scriptService.compile(script, TemplateScript.CONTEXT).newInstance(renderedTemplateParams); - final String requestSource = SearchTemplateHelper.stripTrailingComma(compiledTemplate.execute()); + String requestSource; + try { + requestSource = SearchTemplateHelper.stripTrailingComma(compiledTemplate.execute()); + } catch (JsonProcessingException e) { + JsonLocation loc = e.getLocation(); + throw new XContentParseException(new XContentLocation(loc.getLineNr(), loc.getColumnNr()), e.getMessage(), e); + } + XContentParserConfiguration parserConfig = XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, requestSource)) { diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java index f10480e9f64a7..2e181fda1ef88 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/LocalStateEnterpriseSearch.java @@ -9,31 +9,18 @@ import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.mockito.Mockito; @@ -102,38 +89,8 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - return entSearchPlugin.createComponents( - client, - clusterService, - threadPool, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment, - namedWriteableRegistry, - indexNameExpressionResolver, - repositoriesServiceSupplier, - telemetryProvider, - allocationService, - indicesService - ); + public Collection createComponents(PluginServices services) { + return entSearchPlugin.createComponents(services); } @Override diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactoryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactoryTests.java index 3a6899e06c54f..aac7a4212fb7d 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactoryTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactoryTests.java @@ -7,33 +7,23 @@ package org.elasticsearch.xpack.application.analytics.ingest; -import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkProcessor2; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.client.internal.Client; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.AfterClass; import org.junit.BeforeClass; -import org.mockito.InOrder; -import org.mockito.Mockito; import java.util.concurrent.TimeUnit; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.argThat; -import static org.mockito.Mockito.doAnswer; +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; public class BulkProcessorFactoryTests extends ESTestCase { @@ -49,83 +39,31 @@ public static void afterClass() { ThreadPool.terminate(testThreadPool, 30, TimeUnit.SECONDS); } - public void testFlushDelay() throws Exception { - AnalyticsEventIngestConfig config = mock(AnalyticsEventIngestConfig.class); - doReturn(ByteSizeValue.ofMb(10)).when(config).maxBytesInFlight(); - doReturn(TimeValue.timeValueSeconds(1)).when(config).flushDelay(); - doReturn(10).when(config).maxNumberOfEventsPerBulk(); - - Client client = mock(Client.class); - - doReturn(testThreadPool).when(client).threadPool(); - BulkProcessor2 bulkProcessor = new BulkProcessorFactory(client, config).create(); - IndexRequest indexRequest = mock(IndexRequest.class); - bulkProcessor.add(indexRequest); - - assertBusy(() -> verify(client).execute(any(BulkAction.class), argThat((BulkRequest bulkRequest) -> { - assertThat(bulkRequest.numberOfActions(), equalTo(1)); - assertThat(bulkRequest.requests().stream().findFirst().get(), equalTo(indexRequest)); - return true; - }), any()), 1, TimeUnit.SECONDS); + public void testDefaultConstructor() throws Exception { + BulkProcessorFactory factory = new BulkProcessorFactory(mock(Client.class), mock(AnalyticsEventIngestConfig.class)); + assertThat(factory.create(), instanceOf(BulkProcessor2.class)); } - public void testMaxBulkActions() throws InterruptedException { + public void testConfigValueAreUsed() throws Exception { + TimeValue flushDelay = TimeValue.parseTimeValue(randomTimeValue(), "random time value"); int maxBulkActions = randomIntBetween(1, 10); - int totalEvents = randomIntBetween(1, 5) * maxBulkActions + randomIntBetween(1, maxBulkActions); + int numberOfRetries = between(0, 5); + ByteSizeValue maxBytesInFlight = randomByteSizeValue(); AnalyticsEventIngestConfig config = mock(AnalyticsEventIngestConfig.class); + doReturn(flushDelay).when(config).flushDelay(); doReturn(maxBulkActions).when(config).maxNumberOfEventsPerBulk(); - doReturn(ByteSizeValue.ofMb(10)).when(config).maxBytesInFlight(); - - Client client = mock(Client.class); - InOrder inOrder = Mockito.inOrder(client); - - doReturn(testThreadPool).when(client).threadPool(); - BulkProcessor2 bulkProcessor = new BulkProcessorFactory(client, config).create(); - - for (int i = 0; i < totalEvents; i++) { - bulkProcessor.add(mock(IndexRequest.class)); - } - - inOrder.verify(client, times(totalEvents / maxBulkActions)).execute(any(BulkAction.class), argThat((BulkRequest bulkRequest) -> { - // Verify a bulk is executed immediately with maxNumberOfEventsPerBulk is reached. 
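The test rewrite in this file is enabled by the seam introduced in `BulkProcessorFactory` above: instead of holding a `Client` and invoking the static `BulkProcessor2.builder(...)` inside `create()`, the factory now holds a `Supplier<BulkProcessor2.Builder>`, so the old integration-style tests (driving real bulk executions through a mocked client) shrink to verifying that each config value is forwarded to a spied builder. A generic sketch of the pattern with illustrative stand-in types:

```java
import java.util.function.Supplier;

// Stand-ins for BulkProcessor2.Builder / BulkProcessor2 in this sketch.
interface Builder {
    Builder setMaxRetries(int retries);
    Object build();
}

class Factory {
    private final Supplier<Builder> builderSupplier;

    // The production constructor captures the real static builder call in a
    // lambda; the overload used by tests takes any supplier, e.g. a Mockito spy.
    Factory(Supplier<Builder> builderSupplier) {
        this.builderSupplier = builderSupplier;
    }

    Object create(int retries) {
        // Every create() starts from a fresh builder, which a spy can intercept
        // so the test verifies configuration without executing any requests.
        return builderSupplier.get().setMaxRetries(retries).build();
    }
}
```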
- assertThat(bulkRequest.numberOfActions(), equalTo(maxBulkActions)); - return true; - }), any()); - - bulkProcessor.awaitClose(1, TimeUnit.SECONDS); - - if (totalEvents % maxBulkActions > 0) { - inOrder.verify(client).execute(any(BulkAction.class), argThat((BulkRequest bulkRequest) -> { - // Verify another bulk with only 1 event (the remaining) is executed when closing the processor. - assertThat(bulkRequest.numberOfActions(), equalTo(totalEvents % maxBulkActions)); - return true; - }), any()); - } - } - - public void testMaxRetries() { - int numberOfRetries = between(0, 5); - AnalyticsEventIngestConfig config = mock(AnalyticsEventIngestConfig.class); - doReturn(1).when(config).maxNumberOfEventsPerBulk(); + doReturn(maxBytesInFlight).when(config).maxBytesInFlight(); doReturn(numberOfRetries).when(config).maxNumberOfRetries(); - doReturn(ByteSizeValue.ofMb(10)).when(config).maxBytesInFlight(); - Client client = mock(Client.class); - doAnswer(i -> { - i.getArgument(2, ActionListener.class).onFailure(new ElasticsearchStatusException("", RestStatus.TOO_MANY_REQUESTS)); - return null; - }).when(client).execute(any(), any(), any()); - doReturn(testThreadPool).when(client).threadPool(); - BulkProcessor2 bulkProcessor = new BulkProcessorFactory(client, config).create(); + BulkProcessor2.Builder baseBuilder = spy(BulkProcessor2.builder(mock(), new BulkProcessorFactory.BulkProcessorListener(), mock())); + BulkProcessorFactory factory = new BulkProcessorFactory(config, () -> baseBuilder); - IndexRequest indexRequest = mock(IndexRequest.class); - bulkProcessor.add(indexRequest); + assertThat(factory.create(), instanceOf(BulkProcessor2.class)); - verify(client, times(numberOfRetries + 1)).execute(any(BulkAction.class), argThat((BulkRequest bulkRequest) -> { - assertThat(bulkRequest.numberOfActions(), equalTo(1)); - assertThat(bulkRequest.requests().stream().findFirst().get(), equalTo(indexRequest)); - return true; - }), any()); + verify(baseBuilder).setFlushInterval(eq(flushDelay)); + verify(baseBuilder).setBulkActions(eq(maxBulkActions)); + verify(baseBuilder).setMaxNumberOfRetries(numberOfRetries); + verify(baseBuilder).setMaxBytesInFlight(maxBytesInFlight); } } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java index 2c18a866d684a..9ce62ee8d4c16 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexServiceTests.java @@ -41,6 +41,8 @@ public class QueryRulesIndexServiceTests extends ESSingleNodeTestCase { + private static final int REQUEST_TIMEOUT_SECONDS = 10; + private QueryRulesIndexService queryRulesIndexService; @Before @@ -212,11 +214,11 @@ public void onFailure(Exception e) { latch.countDown(); } }); - assertTrue(latch.await(5, TimeUnit.SECONDS)); + assertTrue("Timeout waiting for put request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); } - assertNotNull(resp.get()); + assertNotNull("Received null response from put request", resp.get()); return resp.get(); } @@ -237,11 +239,11 @@ public void onFailure(Exception e) { latch.countDown(); } }); - assertTrue(latch.await(5, TimeUnit.SECONDS)); + assertTrue("Timeout waiting for get request", latch.await(REQUEST_TIMEOUT_SECONDS, 
TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); } - assertNotNull(resp.get()); + assertNotNull("Received null response from get request", resp.get()); return resp.get(); } @@ -262,11 +264,11 @@ public void onFailure(Exception e) { latch.countDown(); } }); - assertTrue(latch.await(5, TimeUnit.SECONDS)); + assertTrue("Timeout waiting for delete request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); } - assertNotNull(resp.get()); + assertNotNull("Received null response from delete request", resp.get()); return resp.get(); } @@ -287,11 +289,11 @@ public void onFailure(Exception e) { latch.countDown(); } }); - assertTrue(latch.await(5, TimeUnit.SECONDS)); + assertTrue("Timeout waiting for list request", latch.await(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS)); if (exc.get() != null) { throw exc.get(); } - assertNotNull(resp.get()); + assertNotNull("Received null response from list request", resp.get()); return resp.get(); } diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle index 3e1010e2c0eb9..8b9e082215fc4 100644 --- a/x-pack/plugin/eql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle @@ -43,6 +43,8 @@ BuildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.0") && nonInputProperties.systemProperty('tests.rest.cluster', cluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) } + systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') + systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } diff --git a/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java b/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java index 5446e9c27a81a..d8b887b98e647 100644 --- a/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java +++ b/x-pack/plugin/eql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/eql/qa/mixed_node/EqlSearchIT.java @@ -49,6 +49,8 @@ */ public class EqlSearchIT extends ESRestTestCase { + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); + private static final String index = "test_eql_mixed_versions"; private static int numShards; private static int numReplicas = 1; @@ -59,7 +61,7 @@ public class EqlSearchIT extends ESRestTestCase { @Before public void createIndex() throws IOException { - nodes = buildNodeAndVersions(client()); + nodes = buildNodeAndVersions(client(), BWC_NODES_VERSION); numShards = nodes.size(); numDocs = randomIntBetween(numShards, 15); newNodes = new ArrayList<>(nodes.getNewNodes()); diff --git a/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle b/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle index 5fc247693c453..47a405517a309 100644 --- a/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/eql/qa/multi-cluster-with-security/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' @@ -24,7 +25,7 @@ def integTestClusterReg = testClusters.register('javaRestTest') { setting 
'xpack.watcher.enabled', 'false' setting 'cluster.remote.my_remote_cluster.seeds', { remoteClusterReg.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE setting 'cluster.remote.connections_per_cluster', "1" setting 'xpack.security.enabled', 'true' setting 'xpack.security.autoconfiguration.enabled', 'false' diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java index 194cb9be23c63..3a631c7724d09 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AsyncEqlSearchActionIT.java @@ -85,7 +85,6 @@ private void prepareIndex() throws Exception { assertAcked( indicesAdmin().prepareCreate("test") .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date", "i", "type=integer") - .get() ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/EqlCancellationIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/EqlCancellationIT.java index c4e94b9867b08..3ec8d02befb54 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/EqlCancellationIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/EqlCancellationIT.java @@ -40,9 +40,7 @@ public void shutdownExec() { public void testCancellation() throws Exception { assertAcked( - indicesAdmin().prepareCreate("test") - .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") - .get() + indicesAdmin().prepareCreate("test").setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/RestEqlCancellationIT.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/RestEqlCancellationIT.java index 44b75311ee9ca..31f2a4e178c91 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/RestEqlCancellationIT.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/RestEqlCancellationIT.java @@ -60,9 +60,7 @@ protected Collection> nodePlugins() { public void testRestCancellation() throws Exception { assertAcked( - indicesAdmin().prepareCreate("test") - .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") - .get() + indicesAdmin().prepareCreate("test").setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java index d65a81db11266..881cb083a48f2 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/EqlPlugin.java @@ -12,32 +12,21 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import 
org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.BreakerSettings; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.CircuitBreakerPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -71,23 +60,8 @@ public class EqlPlugin extends Plugin implements ActionPlugin, CircuitBreakerPlu public EqlPlugin() {} @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - return createComponents(client, environment.settings(), clusterService); + public Collection<?> createComponents(PluginServices services) { + return createComponents(services.client(), services.environment().settings(), services.clusterService()); } private Collection<Object> createComponents(Client client, Settings settings, ClusterService clusterService) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java index 0f686fd066805..e24a4749f45cd 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/plugin/RestEqlSearchAction.java @@ -6,13 +6,13 @@ */ package org.elasticsearch.xpack.eql.plugin; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import
org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -32,10 +32,11 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.ql.util.LoggingUtils.logOnFailure; @ServerlessScope(Scope.PUBLIC) public class RestEqlSearchAction extends BaseRestHandler { - private static Logger logger = LogManager.getLogger(RestEqlSearchAction.class); + private static final Logger LOGGER = LogManager.getLogger(RestEqlSearchAction.class); private static final String SEARCH_PATH = "/{index}/_eql/search"; @Override @@ -93,11 +94,12 @@ public void onFailure(Exception e) { finalException = new IndexNotFoundException(indices, infe.getCause()); } } + logOnFailure(LOGGER, finalException); try { channel.sendResponse(new RestResponse(channel, finalException)); } catch (Exception inner) { inner.addSuppressed(finalException); - logger.error("failed to send failure response", inner); + LOGGER.error("failed to send failure response", inner); } } }); diff --git a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Fixed.java b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Fixed.java index 286c36ab2314d..62703fa400ff7 100644 --- a/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Fixed.java +++ b/x-pack/plugin/esql/compute/ann/src/main/java/org/elasticsearch/compute/ann/Fixed.java @@ -11,6 +11,7 @@ import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +import java.util.function.Function; /** * Used on parameters on methods annotated with {@link Evaluator} to indicate @@ -20,5 +21,18 @@ @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.SOURCE) public @interface Fixed { + /** + * Should this attribute be in the Evaluator's {@code toString}? + */ boolean includeInToString() default true; + + /** + * Should the Evaluator's factory build this per evaluator with a + * {@code Function<DriverContext, T>} or just take a fixed implementation? + * This is typically set to {@code true} to use the {@link Function} + * to make "scratch" objects which have to be isolated in a single thread. + * This is typically set to {@code false} when the parameter is simply + * immutable and can be shared.
+ */ + boolean build() default false; } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 633af256257ad..8c5646ab56011 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -293,9 +293,7 @@ private MethodSpec intermediateBlockCount() { private MethodSpec addRawInput() { MethodSpec.Builder builder = MethodSpec.methodBuilder("addRawInput"); builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page"); - builder.addStatement("$T uncastBlock = page.getBlock(channels.get(0))", BLOCK); - builder.beginControlFlow("if (uncastBlock.areAllValuesNull())").addStatement("return").endControlFlow(); - builder.addStatement("$T block = ($T) uncastBlock", valueBlockType(init, combine), valueBlockType(init, combine)); + builder.addStatement("$T block = page.getBlock(channels.get(0))", valueBlockType(init, combine)); builder.addStatement("$T vector = block.asVector()", valueVectorType(init, combine)); builder.beginControlFlow("if (vector != null)").addStatement("addRawVector(vector)"); builder.nextControlFlow("else").addStatement("addRawBlock(block)").endControlFlow(); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java index e04158971334a..86ed41c57cb59 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConvertEvaluatorImplementer.java @@ -13,6 +13,10 @@ import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.TypeElement; @@ -25,6 +29,7 @@ import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; +import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR_FACTORY; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.VECTOR; import static org.elasticsearch.compute.gen.Types.blockType; @@ -78,6 +83,7 @@ private TypeSpec type() { builder.addMethod(evalValue(true)); builder.addMethod(evalBlock()); builder.addMethod(evalValue(false)); + builder.addType(factory()); return builder.build(); } @@ -259,4 +265,49 @@ private MethodSpec evalValue(boolean forVector) { return builder.build(); } + + private TypeSpec factory() { + TypeSpec.Builder builder = TypeSpec.classBuilder("Factory"); + builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY); + builder.addModifiers(Modifier.PUBLIC, Modifier.STATIC); + + builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); + builder.addField(EXPRESSION_EVALUATOR_FACTORY, "field", Modifier.PRIVATE, Modifier.FINAL); + + builder.addMethod(factoryCtor()); + builder.addMethod(factoryGet()); + builder.addMethod(factoryToString()); + return 
builder.build(); + } + + private MethodSpec factoryCtor() { + MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); + builder.addParameter(EXPRESSION_EVALUATOR_FACTORY, "field"); + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.field = field"); + builder.addStatement("this.source = source"); + return builder.build(); + } + + private MethodSpec factoryGet() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("get").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC); + builder.addParameter(DRIVER_CONTEXT, "context"); + builder.returns(implementation); + + List<String> args = new ArrayList<>(); + args.add("field.get(context)"); + args.add("source"); + args.add("context"); + builder.addStatement("return new $T($L)", implementation, args.stream().collect(Collectors.joining(", "))); + return builder.build(); + } + + private MethodSpec factoryToString() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("toString").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC); + builder.returns(String.class); + builder.addStatement("return $S + field + $S", declarationType.getSimpleName() + extraName + "Evaluator[field=", "]"); + return builder.build(); + } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java index 8474993b99583..d7ba99ccbc8e9 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/EvaluatorImplementer.java @@ -11,6 +11,7 @@ import com.squareup.javapoet.ClassName; import com.squareup.javapoet.JavaFile; import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.ParameterizedTypeName; import com.squareup.javapoet.TypeName; import com.squareup.javapoet.TypeSpec; @@ -19,6 +20,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.function.Function; import java.util.stream.Collectors; import javax.lang.model.element.ExecutableElement; @@ -36,6 +38,7 @@ import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; +import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR_FACTORY; import static org.elasticsearch.compute.gen.Types.PAGE; import static org.elasticsearch.compute.gen.Types.RELEASABLE; import static org.elasticsearch.compute.gen.Types.RELEASABLES; @@ -82,6 +85,8 @@ private TypeSpec type() { builder.addModifiers(Modifier.PUBLIC, Modifier.FINAL); builder.addSuperinterface(EXPRESSION_EVALUATOR); + builder.addType(factory()); + if (processFunction.warnExceptions.isEmpty() == false) { builder.addField(WARNINGS, "warnings", Modifier.PRIVATE, Modifier.FINAL); } @@ -106,6 +111,7 @@ private MethodSpec ctor() { builder.addStatement("this.warnings = new Warnings(source)"); } processFunction.args.stream().forEach(a -> a.implementCtor(builder)); + builder.addParameter(DRIVER_CONTEXT, "driverContext"); builder.addStatement("this.driverContext = driverContext"); return builder.build(); @@ -247,6 +253,55 @@ private MethodSpec close() { return builder.build(); } + private TypeSpec factory() { + TypeSpec.Builder builder = TypeSpec.classBuilder("Factory"); +
builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY); + builder.addModifiers(Modifier.STATIC); + + if (processFunction.warnExceptions.isEmpty() == false) { + builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); + } + processFunction.args.stream().forEach(a -> a.declareFactoryField(builder)); + + builder.addMethod(factoryCtor()); + builder.addMethod(factoryGet()); + builder.addMethod(toStringMethod()); + + return builder.build(); + } + + private MethodSpec factoryCtor() { + MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); + if (processFunction.warnExceptions.isEmpty() == false) { + builder.addParameter(SOURCE, "source"); + builder.addStatement("this.source = source"); + } + processFunction.args.stream().forEach(a -> a.implementFactoryCtor(builder)); + + return builder.build(); + } + + private MethodSpec factoryGet() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("get").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC); + builder.addParameter(DRIVER_CONTEXT, "context"); + builder.returns(implementation); + + List<String> args = new ArrayList<>(); + if (processFunction.warnExceptions.isEmpty() == false) { + args.add("source"); + } + for (ProcessFunctionArg arg : processFunction.args) { + String invocation = arg.factoryInvocation(builder); + if (invocation != null) { + args.add(invocation); + } + } + args.add("context"); + builder.addStatement("return new $T($L)", implementation, args.stream().collect(Collectors.joining(", "))); + return builder.build(); + } + private interface ProcessFunctionArg { /** * Type containing the actual data for a page of values for this field. Usually a @@ -260,16 +315,34 @@ private interface ProcessFunctionArg { String paramName(boolean blockStyle); /** - * Declare any required fields on the type for this parameter. + * Declare any required fields for the evaluator to implement this type of parameter. */ void declareField(TypeSpec.Builder builder); + /** + * Declare any required fields for the evaluator factory to implement this type of parameter. + */ + void declareFactoryField(TypeSpec.Builder builder); + /** * Implement the ctor for this parameter. Will declare parameters * and assign values to declared fields. */ void implementCtor(MethodSpec.Builder builder); + /** + * Implement the ctor for the evaluator factory for this parameter. + * Will declare parameters and assign values to declared fields. + */ + void implementFactoryCtor(MethodSpec.Builder builder); + + /** + * Invocation called in the ExpressionEvaluator.Factory#get method to + * convert from whatever the factory holds to what the evaluator needs, + * or {@code null} if this parameter isn't passed to the evaluator's ctor. + */ + String factoryInvocation(MethodSpec.Builder factoryMethodBuilder); + /** * Emits code to evaluate this parameter to a Block.Ref or array of Block.Refs * and begins a {@code try} block for those refs. Noop if the parameter is {@link Fixed}.
@@ -340,21 +413,32 @@ public void declareField(TypeSpec.Builder builder) { builder.addField(EXPRESSION_EVALUATOR, name, Modifier.PRIVATE, Modifier.FINAL); } + @Override + public void declareFactoryField(TypeSpec.Builder builder) { + builder.addField(EXPRESSION_EVALUATOR_FACTORY, name, Modifier.PRIVATE, Modifier.FINAL); + } + @Override public void implementCtor(MethodSpec.Builder builder) { builder.addParameter(EXPRESSION_EVALUATOR, name); builder.addStatement("this.$L = $L", name, name); } + @Override + public void implementFactoryCtor(MethodSpec.Builder builder) { + builder.addParameter(EXPRESSION_EVALUATOR_FACTORY, name); + builder.addStatement("this.$L = $L", name, name); + } + + @Override + public String factoryInvocation(MethodSpec.Builder factoryMethodBuilder) { + return name + ".get(context)"; + } + @Override public void evalToBlock(MethodSpec.Builder builder) { TypeName blockType = blockType(type); builder.beginControlFlow("try (Block.Ref $LRef = $L.eval(page))", name, name); - builder.beginControlFlow("if ($LRef.block().areAllValuesNull())", name); - builder.addStatement( - "return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()))" - ); - builder.endControlFlow(); builder.addStatement("$T $LBlock = ($T) $LRef.block()", blockType, name, blockType, name); } @@ -443,12 +527,35 @@ public void declareField(TypeSpec.Builder builder) { builder.addField(ArrayTypeName.of(EXPRESSION_EVALUATOR), name, Modifier.PRIVATE, Modifier.FINAL); } + @Override + public void declareFactoryField(TypeSpec.Builder builder) { + builder.addField(ArrayTypeName.of(EXPRESSION_EVALUATOR_FACTORY), name, Modifier.PRIVATE, Modifier.FINAL); + } + @Override public void implementCtor(MethodSpec.Builder builder) { builder.addParameter(ArrayTypeName.of(EXPRESSION_EVALUATOR), name); builder.addStatement("this.$L = $L", name, name); } + @Override + public void implementFactoryCtor(MethodSpec.Builder builder) { + builder.addParameter(ArrayTypeName.of(EXPRESSION_EVALUATOR_FACTORY), name); + builder.addStatement("this.$L = $L", name, name); + } + + @Override + public String factoryInvocation(MethodSpec.Builder factoryMethodBuilder) { + factoryMethodBuilder.addStatement( + "$T[] $L = Arrays.stream(this.$L).map(a -> a.get(context)).toArray($T[]::new)", + EXPRESSION_EVALUATOR, + name, + name, + EXPRESSION_EVALUATOR + ); + return name; + } + @Override public void evalToBlock(MethodSpec.Builder builder) { TypeName blockType = blockType(componentType); @@ -458,13 +565,7 @@ public void evalToBlock(MethodSpec.Builder builder) { builder.beginControlFlow("for (int i = 0; i < $LBlocks.length; i++)", name); { builder.addStatement("$LRefs[i] = $L[i].eval(page)", name, name); - builder.addStatement("Block block = $LRefs[i].block()", name); - builder.beginControlFlow("if (block.areAllValuesNull())"); - builder.addStatement( - "return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()))" - ); - builder.endControlFlow(); - builder.addStatement("$LBlocks[i] = ($T) block", name, blockType); + builder.addStatement("$LBlocks[i] = ($T) $LRefs[i].block()", name, blockType, name); } builder.endControlFlow(); } @@ -541,7 +642,7 @@ public String closeInvocation() { } } - private record FixedProcessFunctionArg(TypeName type, String name, boolean includeInToString, boolean releasable) + private record FixedProcessFunctionArg(TypeName type, String name, boolean includeInToString, boolean build, boolean releasable) implements ProcessFunctionArg { @Override @@ 
-560,12 +661,32 @@ public void declareField(TypeSpec.Builder builder) { builder.addField(type, name, Modifier.PRIVATE, Modifier.FINAL); } + @Override + public void declareFactoryField(TypeSpec.Builder builder) { + builder.addField(factoryFieldType(), name, Modifier.PRIVATE, Modifier.FINAL); + } + @Override public void implementCtor(MethodSpec.Builder builder) { builder.addParameter(type, name); builder.addStatement("this.$L = $L", name, name); } + @Override + public void implementFactoryCtor(MethodSpec.Builder builder) { + builder.addParameter(factoryFieldType(), name); + builder.addStatement("this.$L = $L", name, name); + } + + private TypeName factoryFieldType() { + return build ? ParameterizedTypeName.get(ClassName.get(Function.class), DRIVER_CONTEXT, type.box()) : type; + } + + @Override + public String factoryInvocation(MethodSpec.Builder factoryMethodBuilder) { + return build ? name + ".apply(context)" : name; + } + @Override public void evalToBlock(MethodSpec.Builder builder) { // nothing to do @@ -634,11 +755,26 @@ public void declareField(TypeSpec.Builder builder) { // Nothing to declare } + @Override + public void declareFactoryField(TypeSpec.Builder builder) { + // Nothing to declare + } + @Override public void implementCtor(MethodSpec.Builder builder) { // Nothing to do } + @Override + public void implementFactoryCtor(MethodSpec.Builder builder) { + // Nothing to do + } + + @Override + public String factoryInvocation(MethodSpec.Builder factoryMethodBuilder) { + return null; // Not used in the factory + } + @Override public void evalToBlock(MethodSpec.Builder builder) { // nothing to do @@ -711,6 +847,7 @@ private ProcessFunction( type, name, fixed.includeInToString(), + fixed.build(), Types.extendsSuper(types, v.asType(), "org.elasticsearch.core.Releasable") ) ); diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java index e357f057c13f2..b84ab64f4f54c 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/GroupingAggregatorImplementer.java @@ -36,7 +36,6 @@ import static org.elasticsearch.compute.gen.Methods.findMethod; import static org.elasticsearch.compute.gen.Methods.findRequiredMethod; import static org.elasticsearch.compute.gen.Methods.vectorAccessorName; -import static org.elasticsearch.compute.gen.Types.BLOCK; import static org.elasticsearch.compute.gen.Types.BLOCK_ARRAY; import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; @@ -250,16 +249,7 @@ private MethodSpec prepareProcessPage() { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).returns(GROUPING_AGGREGATOR_FUNCTION_ADD_INPUT); builder.addParameter(SEEN_GROUP_IDS, "seenGroupIds").addParameter(PAGE, "page"); - builder.addStatement("$T uncastValuesBlock = page.getBlock(channels.get(0))", BLOCK); - - builder.beginControlFlow("if (uncastValuesBlock.areAllValuesNull())"); - { - builder.addStatement("state.enableGroupIdTracking(seenGroupIds)"); - builder.addStatement("return $L", addInput(b -> {})); - } - builder.endControlFlow(); - - builder.addStatement("$T valuesBlock = ($T) uncastValuesBlock", valueBlockType(init, combine), valueBlockType(init, combine)); + builder.addStatement("$T valuesBlock = 
page.getBlock(channels.get(0))", valueBlockType(init, combine)); builder.addStatement("$T valuesVector = valuesBlock.asVector()", valueVectorType(init, combine)); builder.beginControlFlow("if (valuesVector == null)"); { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java index c86f3d82c0015..3a97b44634bd3 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/MvEvaluatorImplementer.java @@ -35,6 +35,7 @@ import static org.elasticsearch.compute.gen.Types.BYTES_REF; import static org.elasticsearch.compute.gen.Types.DRIVER_CONTEXT; import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR; +import static org.elasticsearch.compute.gen.Types.EXPRESSION_EVALUATOR_FACTORY; import static org.elasticsearch.compute.gen.Types.SOURCE; import static org.elasticsearch.compute.gen.Types.WARNINGS; import static org.elasticsearch.compute.gen.Types.blockType; @@ -145,6 +146,8 @@ private TypeSpec type() { builder.addMethod(evalAscending("evalAscendingNullable", true)); builder.addMethod(evalAscending("evalAscendingNotNullable", false)); } + + builder.addType(factory()); return builder.build(); } @@ -349,6 +352,59 @@ private void writeResult(MethodSpec.Builder builder) { } } + private TypeSpec factory() { + TypeSpec.Builder builder = TypeSpec.classBuilder("Factory"); + builder.addSuperinterface(EXPRESSION_EVALUATOR_FACTORY); + builder.addModifiers(Modifier.PUBLIC, Modifier.STATIC); + + if (warnExceptions.isEmpty() == false) { + builder.addField(SOURCE, "source", Modifier.PRIVATE, Modifier.FINAL); + } + builder.addField(EXPRESSION_EVALUATOR_FACTORY, "field", Modifier.PRIVATE, Modifier.FINAL); + + builder.addMethod(factoryCtor()); + builder.addMethod(factoryGet()); + builder.addMethod(factoryToString()); + return builder.build(); + } + + private MethodSpec factoryCtor() { + MethodSpec.Builder builder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); + if (warnExceptions.isEmpty() == false) { + builder.addParameter(SOURCE, "source"); + } + builder.addParameter(EXPRESSION_EVALUATOR_FACTORY, "field"); + if (warnExceptions.isEmpty() == false) { + builder.addStatement("this.source = source"); + } + builder.addStatement("this.field = field"); + return builder.build(); + } + + private MethodSpec factoryGet() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("get").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC); + builder.addParameter(DRIVER_CONTEXT, "context"); + builder.returns(implementation); + + List<String> args = new ArrayList<>(); + if (warnExceptions.isEmpty() == false) { + args.add("source"); + } + args.add("field.get(context)"); + args.add("context"); + builder.addStatement("return new $T($L)", implementation, args.stream().collect(Collectors.joining(", "))); + return builder.build(); + } + + private MethodSpec factoryToString() { + MethodSpec.Builder builder = MethodSpec.methodBuilder("toString").addAnnotation(Override.class); + builder.addModifiers(Modifier.PUBLIC); + builder.returns(String.class); + builder.addStatement("return $S + field + $S", declarationType.getSimpleName() + "[field=", "]"); + return builder.build(); + } + /** * Function "finishing" the computation on a multivalued field. It converts {@link #workType} into {@link #resultType}.
*/ diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java index 46fb6af22e79b..c1802b671f2a6 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Types.java @@ -96,6 +96,7 @@ public class Types { static final ClassName DRIVER_CONTEXT = ClassName.get(OPERATOR_PACKAGE, "DriverContext"); static final ClassName EXPRESSION_EVALUATOR = ClassName.get(OPERATOR_PACKAGE, "EvalOperator", "ExpressionEvaluator"); + static final ClassName EXPRESSION_EVALUATOR_FACTORY = ClassName.get(OPERATOR_PACKAGE, "EvalOperator", "ExpressionEvaluator", "Factory"); static final ClassName ABSTRACT_MULTIVALUE_FUNCTION_EVALUATOR = ClassName.get( "org.elasticsearch.xpack.esql.expression.function.scalar.multivalue", "AbstractMultivalueFunction", diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index f8f291c2a2e69..352ee783d8614 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -17,7 +18,7 @@ * Block that stores boolean values. * This class is generated. Do not edit it. */ -public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock { +public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock { /** * Retrieves the boolean value stored at the given value index. @@ -165,31 +166,52 @@ static int hash(BooleanBlock block) { return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. + * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBooleanBlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static BooleanBlock newConstantBlockWith(boolean value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. 
+ * @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} + */ + @Deprecated static BooleanBlock newConstantBlockWith(boolean value, int positions, BlockFactory blockFactory) { return blockFactory.newConstantBooleanBlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits BooleanBlockBuilder { - + /** + * Builder for {@link BooleanBlock} + */ + sealed interface Builder extends Block.Builder, BlockLoader.BooleanBuilder permits BooleanBlockBuilder { /** * Appends a boolean to the current entry. */ + @Override Builder appendBoolean(boolean value); /** @@ -213,12 +235,11 @@ sealed interface Builder extends Block.Builder permits BooleanBlockBuilder { @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index 9a4cc64d760ef..ec4ab8f7def1c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -16,7 +16,8 @@ * Vector that stores boolean values. * This class is generated. Do not edit it. */ -public sealed interface BooleanVector extends Vector permits ConstantBooleanVector, BooleanArrayVector, BooleanBigArrayVector { +public sealed interface BooleanVector extends Vector permits ConstantBooleanVector, BooleanArrayVector, BooleanBigArrayVector, + ConstantNullVector { boolean getBoolean(int position); @Override @@ -100,8 +101,12 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. + * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } @@ -109,7 +114,9 @@ static Builder newVectorBuilder(int estimatedSize) { /** * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} */ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBooleanVectorBuilder(estimatedSize); } @@ -117,7 +124,9 @@ static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { /** * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} * if you know the size up front because it's faster. 
+ * @deprecated use {@link BlockFactory#newBooleanVectorFixedBuilder} */ + @Deprecated static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { return blockFactory.newBooleanVectorFixedBuilder(size); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 488d3032b2b08..50611f3e15130 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -18,7 +19,7 @@ * Block that stores BytesRef values. * This class is generated. Do not edit it. */ -public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock { +public sealed interface BytesRefBlock extends Block permits BytesRefArrayBlock, BytesRefVectorBlock, ConstantNullBlock { BytesRef NULL_VALUE = new BytesRef(); @@ -170,31 +171,52 @@ static int hash(BytesRefBlock block) { return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. + * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBytesRefBlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static BytesRefBlock newConstantBlockWith(BytesRef value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. + * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} + */ + @Deprecated static BytesRefBlock newConstantBlockWith(BytesRef value, int positions, BlockFactory blockFactory) { return blockFactory.newConstantBytesRefBlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits BytesRefBlockBuilder { - + /** + * Builder for {@link BytesRefBlock} + */ + sealed interface Builder extends Block.Builder, BlockLoader.BytesRefBuilder permits BytesRefBlockBuilder { /** * Appends a BytesRef to the current entry. 
*/ + @Override Builder appendBytesRef(BytesRef value); /** @@ -218,12 +240,11 @@ sealed interface Builder extends Block.Builder permits BytesRefBlockBuilder { @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index 82a18f5d5b79e..b7011666b981d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -17,7 +17,7 @@ * Vector that stores BytesRef values. * This class is generated. Do not edit it. */ -public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector { +public sealed interface BytesRefVector extends Vector permits ConstantBytesRefVector, BytesRefArrayVector, ConstantNullVector { BytesRef getBytesRef(int position, BytesRef dest); @Override @@ -101,15 +101,21 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. + * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } /** * Creates a builder that grows as needed. + * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} */ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBytesRefVectorBuilder(estimatedSize); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index c2e63a0c6f384..31d0000d28515 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -17,7 +18,7 @@ * Block that stores double values. * This class is generated. Do not edit it. */ -public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock { +public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock, ConstantNullBlock { /** * Retrieves the double value stored at the given value index. @@ -166,31 +167,52 @@ static int hash(DoubleBlock block) { return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. + * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newDoubleBlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static DoubleBlock newConstantBlockWith(double value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. + * @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} + */ + @Deprecated static DoubleBlock newConstantBlockWith(double value, int positions, BlockFactory blockFactory) { return blockFactory.newConstantDoubleBlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits DoubleBlockBuilder { - + /** + * Builder for {@link DoubleBlock} + */ + sealed interface Builder extends Block.Builder, BlockLoader.DoubleBuilder permits DoubleBlockBuilder { /** * Appends a double to the current entry. */ + @Override Builder appendDouble(double value); /** @@ -214,12 +236,11 @@ sealed interface Builder extends Block.Builder permits DoubleBlockBuilder { @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index 545d17004333a..acabd0deb17f6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -16,7 +16,8 @@ * Vector that stores double values. * This class is generated. Do not edit it. */ -public sealed interface DoubleVector extends Vector permits ConstantDoubleVector, DoubleArrayVector, DoubleBigArrayVector { +public sealed interface DoubleVector extends Vector permits ConstantDoubleVector, DoubleArrayVector, DoubleBigArrayVector, + ConstantNullVector { double getDouble(int position); @Override @@ -101,8 +102,12 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. 
+ * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } @@ -110,7 +115,9 @@ static Builder newVectorBuilder(int estimatedSize) { /** * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} */ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newDoubleVectorBuilder(estimatedSize); } @@ -118,7 +125,9 @@ static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { /** * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#newDoubleVectorFixedBuilder} */ + @Deprecated static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { return blockFactory.newDoubleVectorFixedBuilder(size); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index f27e855809491..3909d2b6761be 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -17,7 +18,7 @@ * Block that stores int values. * This class is generated. Do not edit it. */ -public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock { +public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock { /** * Retrieves the int value stored at the given value index. @@ -165,31 +166,52 @@ static int hash(IntBlock block) { return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newIntBlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. + * @deprecated use {@link BlockFactory#newIntBlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newIntBlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. 
+ * @deprecated use {@link BlockFactory#newConstantIntBlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static IntBlock newConstantBlockWith(int value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. + * @deprecated use {@link BlockFactory#newConstantIntBlockWith} + */ + @Deprecated static IntBlock newConstantBlockWith(int value, int positions, BlockFactory blockFactory) { return blockFactory.newConstantIntBlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits IntBlockBuilder { - + /** + * Builder for {@link IntBlock} + */ + sealed interface Builder extends Block.Builder, BlockLoader.IntBuilder permits IntBlockBuilder { /** * Appends a int to the current entry. */ + @Override Builder appendInt(int value); /** @@ -213,12 +235,11 @@ sealed interface Builder extends Block.Builder permits IntBlockBuilder { @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 6c3b46c3228e6..645288565c431 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -16,7 +16,7 @@ * Vector that stores int values. * This class is generated. Do not edit it. */ -public sealed interface IntVector extends Vector permits ConstantIntVector, IntArrayVector, IntBigArrayVector { +public sealed interface IntVector extends Vector permits ConstantIntVector, IntArrayVector, IntBigArrayVector, ConstantNullVector { int getInt(int position); @@ -101,8 +101,12 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. + * @deprecated use {@link BlockFactory#newIntVectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } @@ -110,7 +114,9 @@ static Builder newVectorBuilder(int estimatedSize) { /** * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#newIntVectorBuilder} */ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newIntVectorBuilder(estimatedSize); } @@ -118,7 +124,9 @@ static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { /** * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} * if you know the size up front because it's faster. 
+ * @deprecated use {@link BlockFactory#newIntVectorFixedBuilder} */ + @Deprecated static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { return blockFactory.newIntVectorFixedBuilder(size); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 287b55eac3d04..41ac8f7237f64 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -17,7 +18,7 @@ * Block that stores long values. * This class is generated. Do not edit it. */ -public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock { +public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock, ConstantNullBlock { /** * Retrieves the long value stored at the given value index. @@ -166,31 +167,52 @@ static int hash(LongBlock block) { return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newLongBlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. + * @deprecated use {@link BlockFactory#newLongBlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newLongBlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstantLongBlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static LongBlock newConstantBlockWith(long value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. + * @deprecated use {@link BlockFactory#newConstantLongBlockWith} + */ + @Deprecated static LongBlock newConstantBlockWith(long value, int positions, BlockFactory blockFactory) { return blockFactory.newConstantLongBlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits LongBlockBuilder { - + /** + * Builder for {@link LongBlock} + */ + sealed interface Builder extends Block.Builder, BlockLoader.LongBuilder permits LongBlockBuilder { /** * Appends a long to the current entry. 
*/ + @Override Builder appendLong(long value); /** @@ -214,12 +236,11 @@ sealed interface Builder extends Block.Builder permits LongBlockBuilder { @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index 44e81139adccf..a312d7aeab0cc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -16,7 +16,7 @@ * Vector that stores long values. * This class is generated. Do not edit it. */ -public sealed interface LongVector extends Vector permits ConstantLongVector, LongArrayVector, LongBigArrayVector { +public sealed interface LongVector extends Vector permits ConstantLongVector, LongArrayVector, LongBigArrayVector, ConstantNullVector { long getLong(int position); @@ -102,8 +102,12 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. + * @deprecated use {@link BlockFactory#newLongVectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } @@ -111,7 +115,9 @@ static Builder newVectorBuilder(int estimatedSize) { /** * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#newLongVectorBuilder} */ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newLongVectorBuilder(estimatedSize); } @@ -119,7 +125,9 @@ static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { /** * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} * if you know the size up front because it's faster. 
+ * @deprecated use {@link BlockFactory#newLongVectorFixedBuilder} */ + @Deprecated static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { return blockFactory.newLongVectorFixedBuilder(size); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index a081166ed8bf5..f0344ec6b56ba 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -54,11 +54,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - BooleanBlock block = (BooleanBlock) uncastBlock; + BooleanBlock block = page.getBlock(channels.get(0)); BooleanVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java index 820cb889d2ec6..adc09f8bba828 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanGroupingAggregatorFunction.java @@ -62,20 +62,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - BooleanBlock valuesBlock = (BooleanBlock) uncastValuesBlock; + BooleanBlock valuesBlock = page.getBlock(channels.get(0)); BooleanVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index f63144ea3c72f..15d3290cdb18f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -61,11 +61,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - BytesRefBlock block = (BytesRefBlock) uncastBlock; + BytesRefBlock block = page.getBlock(channels.get(0)); BytesRefVector vector = block.asVector(); if (vector != 
null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java index 36bcb6d145c05..46cfbcc99a373 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefGroupingAggregatorFunction.java @@ -65,20 +65,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - BytesRefBlock valuesBlock = (BytesRefBlock) uncastValuesBlock; + BytesRefBlock valuesBlock = page.getBlock(channels.get(0)); BytesRefVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index 4e7893d74ca78..77e47b543bd26 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -63,11 +63,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java index 54c57c2138505..84ed6d2f06329 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleGroupingAggregatorFunction.java @@ -67,20 +67,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - 
DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 091379cd2145f..1f6f91d4d6adc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -63,11 +63,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java index acaff7b8e96d7..b5dc7e43467a8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntGroupingAggregatorFunction.java @@ -65,20 +65,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index 5a85b4132b4ef..cf51b3dcff5a1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -63,11 +63,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java index 06165026a5d16..6c69845dbf107 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongGroupingAggregatorFunction.java @@ -67,20 +67,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index e5dfb17bdf819..c613623230e7f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java index b761bb66b8edb..86ada78737512 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = 
valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 1f1f0981ec4cd..9307522e515a8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java index a7ef8d5573fdc..d0809b2a6853c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntGroupingAggregatorFunction.java @@ -57,20 +57,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index 0e527fcefce79..6902539b8b2f0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java index d224ecd2d293e..5b28fa01ef0ce 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index 075ee7660adce..d7b84492c4cbc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java index 5707f9942d0f8..08bbde35e592e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleGroupingAggregatorFunction.java @@ -63,20 +63,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index 3927456023ffa..211f0f622b728 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java index b8f08efffd7d1..cad7e172e67bb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntGroupingAggregatorFunction.java @@ -61,20 +61,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index 3123d102a1a72..2311ce3c18315 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java index 74ee25c27c86a..dae97d17db711 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongGroupingAggregatorFunction.java @@ -63,20 +63,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index d40f2a0622974..afcace2069ebf 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java index d7c2c59eaed2c..4d0e78a26865d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index 5b28da79797b6..e8deaf9cf07fd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java index bf0c4c5a5e02c..97b5eafa9e72e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntGroupingAggregatorFunction.java @@ -57,20 +57,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 2b6bf88e0880d..61f41c9693725 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java index d7baa88d6da26..7e68a4b933841 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index 2e1c36a78cd67..0757cb91b2747 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -59,11 +59,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java index 63613daa47ad1..232287356174c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleGroupingAggregatorFunction.java @@ -67,20 +67,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index 936ffaba2e0ae..142772592a9a8 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -59,11 +59,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java index 38911d7a02f05..1b4e49fa1e040 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntGroupingAggregatorFunction.java @@ -65,20 +65,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index 9ca64b4ee8137..d360f14453ce5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -59,11 +59,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java index efb512e84f1f5..1382aa8b27331 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongGroupingAggregatorFunction.java @@ -67,20 +67,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java index e9f9c0e1c15de..48c50b026f198 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -57,11 +57,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - DoubleBlock block = (DoubleBlock) uncastBlock; + DoubleBlock block = page.getBlock(channels.get(0)); DoubleVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java index 6094c7115159f..6e2207ca069cd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleGroupingAggregatorFunction.java @@ -65,20 +65,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - DoubleBlock valuesBlock = (DoubleBlock) uncastValuesBlock; + DoubleBlock valuesBlock = page.getBlock(channels.get(0)); DoubleVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java index a2656c7197643..f834c932b0a56 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -58,11 +58,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - IntBlock block = (IntBlock) uncastBlock; + IntBlock block = page.getBlock(channels.get(0)); IntVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java index d0c9ecb6ac84a..373a7af4b3d67 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - IntBlock valuesBlock = (IntBlock) uncastValuesBlock; + IntBlock valuesBlock = page.getBlock(channels.get(0)); IntVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java index 1cf9fdddea876..058fdcbe507b4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -56,11 +56,7 @@ public int intermediateBlockCount() { @Override public void addRawInput(Page page) { - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { - return; - } - LongBlock block = (LongBlock) uncastBlock; + LongBlock block = page.getBlock(channels.get(0)); LongVector vector = block.asVector(); if (vector != null) { addRawVector(vector); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java index 9df3cd2d76cae..46e993b49c666 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongGroupingAggregatorFunction.java @@ -59,20 +59,7 @@ public int intermediateBlockCount() { @Override public GroupingAggregatorFunction.AddInput 
prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { - Block uncastValuesBlock = page.getBlock(channels.get(0)); - if (uncastValuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new GroupingAggregatorFunction.AddInput() { - @Override - public void add(int positionOffset, IntBlock groupIds) { - } - - @Override - public void add(int positionOffset, IntVector groupIds) { - } - }; - } - LongBlock valuesBlock = (LongBlock) uncastValuesBlock; + LongBlock valuesBlock = page.getBlock(channels.get(0)); LongVector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index 8108ab996936b..995dc5e15740f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -61,16 +61,6 @@ public int intermediateBlockCount() { public AddInput prepareProcessPage(SeenGroupIds seenGroupIds, Page page) { Block valuesBlock = page.getBlock(blockIndex()); if (countAll == false) { - if (valuesBlock.areAllValuesNull()) { - state.enableGroupIdTracking(seenGroupIds); - return new AddInput() { // TODO return null meaning "don't collect me" and skip those - @Override - public void add(int positionOffset, IntBlock groupIds) {} - - @Override - public void add(int positionOffset, IntVector groupIds) {} - }; - } Vector valuesVector = valuesBlock.asVector(); if (valuesVector == null) { if (valuesBlock.mayHaveNulls()) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 3f696e3387c54..6f041a6681659 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -61,7 +61,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(BooleanVector vector) { int positions = vector.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(MultivalueDedupeBoolean.hashOrd(everSeen, vector.getBoolean(i))); } @@ -75,28 +75,30 @@ private IntBlock add(BooleanBlock block) { @Override public BooleanBlock[] getKeys() { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(everSeen.length); - if (everSeen[NULL_ORD]) { - builder.appendNull(); - } - if (everSeen[FALSE_ORD]) { - builder.appendBoolean(false); - } - if (everSeen[TRUE_ORD]) { - builder.appendBoolean(true); + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(everSeen.length)) { + if (everSeen[NULL_ORD]) { + builder.appendNull(); + } + if (everSeen[FALSE_ORD]) { + builder.appendBoolean(false); + } + if (everSeen[TRUE_ORD]) { + builder.appendBoolean(true); + } + return new BooleanBlock[] { builder.build() }; } - return new BooleanBlock[] { builder.build() }; } 
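Every generated aggregator hunk above deletes the same all-null guard. Read together with the LongVector permits change at the top of this section, which adds ConstantNullVector, the rationale appears to be that an all-null block now implements each typed block interface directly, so the unconditional cast is safe. A minimal sketch of the resulting addRawInput fast path; the addRawBlock counterpart on the null-aware branch is assumed from context, since the hunks cut off before it:

    @Override
    public void addRawInput(Page page) {
        // No areAllValuesNull() special case: a constant-null block satisfies LongBlock.
        LongBlock block = page.getBlock(channels.get(0));
        LongVector vector = block.asVector(); // null when any position is null or multivalued
        if (vector != null) {
            addRawVector(vector); // dense, null-free fast path
        } else {
            addRawBlock(block); // null-aware path, which now also covers all-null input
        }
    }
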
@Override public IntVector nonEmpty() { - IntVector.Builder builder = IntVector.newVectorBuilder(everSeen.length); - for (int i = 0; i < everSeen.length; i++) { - if (everSeen[i]) { - builder.appendInt(i); + try (IntVector.Builder builder = blockFactory.newIntVectorBuilder(everSeen.length)) { + for (int i = 0; i < everSeen.length; i++) { + if (everSeen[i]) { + builder.appendInt(i); + } } + return builder.build(); } - return builder.build(); } public BitArray seenGroupIds(BigArrays bigArrays) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java index 27e40e82f7df1..2f1bb4f858ff4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefBlockHash.java @@ -68,8 +68,8 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { addInput.add(0, groupIds); } } else { - try (IntBlock groupIds = add(bytesVector).asBlock()) { - addInput.add(0, groupIds.asVector()); + try (IntVector groupIds = add(bytesVector)) { + addInput.add(0, groupIds); } } } @@ -77,7 +77,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(BytesRefVector vector) { int positions = vector.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(bytesRefHash.add(vector.getBytesRef(i, bytes))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index 1721ded09849e..da2c85e532016 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -74,7 +74,9 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { if (vector1 != null && vector2 != null) { addInput.add(0, add(vector1, vector2)); } else { - new AddBlock(block1, block2, addInput).add(); + try (AddWork work = new AddWork(block1, block2, addInput)) { + work.add(); + } } } @@ -91,12 +93,12 @@ public IntVector add(BytesRefVector vector1, LongVector vector2) { private static final long[] EMPTY = new long[0]; - private class AddBlock extends LongLongBlockHash.AbstractAddBlock { + private class AddWork extends LongLongBlockHash.AbstractAddBlock { private final BytesRefBlock block1; private final LongBlock block2; - AddBlock(BytesRefBlock block1, LongBlock block2, GroupingAggregatorFunction.AddInput addInput) { - super(emitBatchSize, addInput); + AddWork(BytesRefBlock block1, LongBlock block2, GroupingAggregatorFunction.AddInput addInput) { + super(blockFactory, emitBatchSize, addInput); this.block1 = block1; this.block2 = block2; } @@ -165,18 +167,29 @@ void add() { @Override public Block[] getKeys() { int positions = (int) finalHash.size(); - BytesRefVector.Builder keys1 = BytesRefVector.newVectorBuilder(positions); - LongVector.Builder keys2 = 
LongVector.newVectorBuilder(positions); - BytesRef scratch = new BytesRef(); - for (long i = 0; i < positions; i++) { - keys2.appendLong(finalHash.getKey2(i)); - long h1 = finalHash.getKey1(i); - keys1.appendBytesRef(bytesHash.get(h1, scratch)); + BytesRefVector k1 = null; + LongVector k2 = null; + try ( + BytesRefVector.Builder keys1 = blockFactory.newBytesRefVectorBuilder(positions); + LongVector.Builder keys2 = blockFactory.newLongVectorBuilder(positions) + ) { + BytesRef scratch = new BytesRef(); + for (long i = 0; i < positions; i++) { + keys2.appendLong(finalHash.getKey2(i)); + long h1 = finalHash.getKey1(i); + keys1.appendBytesRef(bytesHash.get(h1, scratch)); + } + k1 = keys1.build(); + k2 = keys2.build(); + } finally { + if (k2 == null) { + Releasables.closeExpectNoException(k1); + } } if (reverseOutput) { - return new Block[] { keys2.build().asBlock(), keys1.build().asBlock() }; + return new Block[] { k2.asBlock(), k1.asBlock() }; } else { - return new Block[] { keys1.build().asBlock(), keys2.build().asBlock() }; + return new Block[] { k1.asBlock(), k2.asBlock() }; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java index bbf5911521122..a8a67180775fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/DoubleBlockHash.java @@ -62,8 +62,8 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { addInput.add(0, groupIds); } } else { - try (IntBlock groupIds = add(doubleVector).asBlock()) { - addInput.add(0, groupIds.asVector()); + try (IntVector groupIds = add(doubleVector)) { + addInput.add(0, groupIds); } } } @@ -71,7 +71,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(DoubleVector vector) { int positions = vector.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(Double.doubleToLongBits(vector.getDouble(i)))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java index 17c7fab73d2be..79e03e4dc0ed5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/IntBlockHash.java @@ -59,8 +59,8 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { addInput.add(0, groupIds); } } else { - try (IntBlock groupIds = add(intVector).asBlock()) { - addInput.add(0, groupIds.asVector()); + try (IntVector groupIds = add(intVector)) { + addInput.add(0, groupIds); } } } @@ -68,7 +68,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(IntVector vector) { int positions = vector.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { 
for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getInt(i))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java index 1d3be57bb40d7..c736cfae65ee7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongBlockHash.java @@ -62,8 +62,8 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { addInput.add(0, groupIds); } } else { - try (IntBlock groupIds = add(longVector).asBlock()) { // Ugh!! - addInput.add(0, groupIds.asVector()); + try (IntVector groupIds = add(longVector)) { + addInput.add(0, groupIds); } } } @@ -71,7 +71,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(LongVector vector) { int positions = vector.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroupNullReserved(longHash.add(vector.getLong(i))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index f36e8c76ec98b..ce53f0bb8e7f4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.aggregation.SeenGroupIds; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; @@ -74,13 +75,12 @@ private IntVector add(LongVector vector1, LongVector vector2) { private static final long[] EMPTY = new long[0]; - // TODO: this uses the non-breaking block factory - update to use this blockFactory - private class AddBlock extends AbstractAddBlock implements Releasable { + private class AddBlock extends AbstractAddBlock { private final LongBlock block1; private final LongBlock block2; AddBlock(LongBlock block1, LongBlock block2, GroupingAggregatorFunction.AddInput addInput) { - super(emitBatchSize, addInput); + super(blockFactory, emitBatchSize, addInput); this.block1 = block1; this.block2 = block2; } @@ -137,14 +137,10 @@ void add() { } emitOrds(); } - - @Override - public void close() { - Releasables.closeExpectNoException(block1, block2); - } } - static class AbstractAddBlock { + static class AbstractAddBlock implements Releasable { + private final BlockFactory blockFactory; private final int emitBatchSize; private final GroupingAggregatorFunction.AddInput addInput; @@ -152,11 +148,12 @@ static class AbstractAddBlock { private int added = 0; protected IntBlock.Builder ords; - AbstractAddBlock(int emitBatchSize, GroupingAggregatorFunction.AddInput addInput) { + 
AbstractAddBlock(BlockFactory blockFactory, int emitBatchSize, GroupingAggregatorFunction.AddInput addInput) { + this.blockFactory = blockFactory; this.emitBatchSize = emitBatchSize; this.addInput = addInput; - this.ords = IntBlock.newBlockBuilder(emitBatchSize); + this.ords = blockFactory.newIntBlockBuilder(emitBatchSize); } protected final void addedValue(int position) { @@ -174,13 +171,20 @@ protected final void addedValueInMultivaluePosition(int position) { } protected final void emitOrds() { - addInput.add(positionOffset, ords.build()); + try (IntBlock ordsBlock = ords.build()) { + addInput.add(positionOffset, ordsBlock); + } } private void rollover(int position) { emitOrds(); positionOffset = position; - ords = IntBlock.newBlockBuilder(emitBatchSize); // TODO add a clear method to the builder? + ords = blockFactory.newIntBlockBuilder(emitBatchSize); // TODO add a clear method to the builder? + } + + @Override + public final void close() { + ords.close(); } } @@ -197,13 +201,24 @@ static int add(long[] seen, int nextSeen, long v) { @Override public Block[] getKeys() { int positions = (int) hash.size(); - LongVector.Builder keys1 = blockFactory.newLongVectorBuilder(positions); - LongVector.Builder keys2 = blockFactory.newLongVectorBuilder(positions); - for (long i = 0; i < positions; i++) { - keys1.appendLong(hash.getKey1(i)); - keys2.appendLong(hash.getKey2(i)); + LongVector k1 = null; + LongVector k2 = null; + try ( + LongVector.Builder keys1 = blockFactory.newLongVectorBuilder(positions); + LongVector.Builder keys2 = blockFactory.newLongVectorBuilder(positions) + ) { + for (long i = 0; i < positions; i++) { + keys1.appendLong(hash.getKey1(i)); + keys2.appendLong(hash.getKey2(i)); + } + k1 = keys1.build(); + k2 = keys2.build(); + } finally { + if (k2 == null) { + Releasables.close(k1); + } } - return new Block[] { keys1.build().asBlock(), keys2.build().asBlock() }; + return new Block[] { k1.asBlock(), k2.asBlock() }; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java index a624a3ec68100..06b833974a5db 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/PackedValuesBlockHash.java @@ -73,7 +73,9 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { } void add(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) { - new AddWork(page, addInput, batchSize).add(); + try (AddWork work = new AddWork(page, addInput, batchSize)) { + work.add(); + } } private static class Group { @@ -95,7 +97,7 @@ class AddWork extends LongLongBlockHash.AbstractAddBlock { int position; AddWork(Page page, GroupingAggregatorFunction.AddInput addInput, int batchSize) { - super(emitBatchSize, addInput); + super(blockFactory, emitBatchSize, addInput); for (Group group : groups) { group.encoder = MultivalueDedupe.batchEncoder(new Block.Ref(page.getBlock(group.spec.channel()), page), batchSize, true); } @@ -193,45 +195,54 @@ public Block[] getKeys() { int size = Math.toIntExact(bytesRefHash.size()); BatchEncoder.Decoder[] decoders = new BatchEncoder.Decoder[groups.length]; Block.Builder[] builders = new Block.Builder[groups.length]; - for (int g = 0; g < builders.length; g++) { - ElementType elementType 
= groups[g].spec.elementType(); - decoders[g] = BatchEncoder.decoder(elementType); - builders[g] = elementType.newBlockBuilder(size, blockFactory); - } + try { + for (int g = 0; g < builders.length; g++) { + ElementType elementType = groups[g].spec.elementType(); + decoders[g] = BatchEncoder.decoder(elementType); + builders[g] = elementType.newBlockBuilder(size, blockFactory); + } - BytesRef[] values = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; - BytesRef[] nulls = new BytesRef[values.length]; - for (int offset = 0; offset < values.length; offset++) { - values[offset] = new BytesRef(); - nulls[offset] = new BytesRef(); - nulls[offset].length = nullTrackingBytes; - } - int offset = 0; - for (int i = 0; i < bytesRefHash.size(); i++) { - values[offset] = bytesRefHash.get(i, values[offset]); + BytesRef[] values = new BytesRef[(int) Math.min(100, bytesRefHash.size())]; + BytesRef[] nulls = new BytesRef[values.length]; + for (int offset = 0; offset < values.length; offset++) { + values[offset] = new BytesRef(); + nulls[offset] = new BytesRef(); + nulls[offset].length = nullTrackingBytes; + } + int offset = 0; + for (int i = 0; i < bytesRefHash.size(); i++) { + values[offset] = bytesRefHash.get(i, values[offset]); - // Reference the null bytes in the nulls array and values in the values - nulls[offset].bytes = values[offset].bytes; - nulls[offset].offset = values[offset].offset; - values[offset].offset += nullTrackingBytes; - values[offset].length -= nullTrackingBytes; + // Reference the null bytes in the nulls array and values in the values + nulls[offset].bytes = values[offset].bytes; + nulls[offset].offset = values[offset].offset; + values[offset].offset += nullTrackingBytes; + values[offset].length -= nullTrackingBytes; - offset++; - if (offset == values.length) { + offset++; + if (offset == values.length) { + readKeys(decoders, builders, nulls, values, offset); + offset = 0; + } + } + if (offset > 0) { readKeys(decoders, builders, nulls, values, offset); - offset = 0; } - } - if (offset > 0) { - readKeys(decoders, builders, nulls, values, offset); - } - Block[] keyBlocks = new Block[groups.length]; - for (int g = 0; g < keyBlocks.length; g++) { - keyBlocks[g] = builders[g].build(); + Block[] keyBlocks = new Block[groups.length]; + try { + for (int g = 0; g < keyBlocks.length; g++) { + keyBlocks[g] = builders[g].build(); + } + } finally { + if (keyBlocks[keyBlocks.length - 1] == null) { + Releasables.closeExpectNoException(keyBlocks); + } + } + return keyBlocks; + } finally { + Releasables.closeExpectNoException(builders); } - Releasables.closeExpectNoException(builders); - return keyBlocks; } private void readKeys(BatchEncoder.Decoder[] decoders, Block.Builder[] builders, BytesRef[] nulls, BytesRef[] values, int count) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index bd13a9045a28a..d6d6584e1b534 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -140,8 +140,9 @@ protected final void ensureCapacity() { return; } int newSize = calculateNewArraySize(valuesLength); - adjustBreaker((long) (newSize - valuesLength) * elementSize()); + adjustBreaker(newSize * elementSize()); growValuesArray(newSize); + adjustBreaker(-valuesLength * elementSize()); } 
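The ensureCapacity change above swaps delta accounting for a reserve-then-release sequence: the breaker is charged for the whole new array before it is allocated, and the old array's share is only given back after the copy, so the breaker covers the window where both arrays are live. A toy sketch of the same accounting; the `Breaker` class here is a simplified stand-in for the real circuit breaker, not its API.

```java
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicLong;

final class GrowWithBreaker {
    /** Toy breaker: tracks reserved bytes and throws when the limit is exceeded. */
    static final class Breaker {
        private final long limit;
        private final AtomicLong used = new AtomicLong();

        Breaker(long limit) {
            this.limit = limit;
        }

        void adjust(long delta) {
            long now = used.addAndGet(delta);
            if (delta > 0 && now > limit) {
                used.addAndGet(-delta); // roll back the failed reservation
                throw new IllegalStateException("over limit: " + now + " > " + limit);
            }
        }
    }

    private final Breaker breaker;
    private long[] values;

    GrowWithBreaker(Breaker breaker, int initialSize) {
        this.breaker = breaker;
        breaker.adjust((long) initialSize * Long.BYTES);
        this.values = new long[initialSize];
    }

    void grow(int newSize) {
        long oldBytes = (long) values.length * Long.BYTES;
        // Reserve the *whole* new array first: during the copy below both
        // arrays are live, so both must be accounted for at once.
        breaker.adjust((long) newSize * Long.BYTES);
        values = Arrays.copyOf(values, newSize);
        // Only now release the old array's share.
        breaker.adjust(-oldBytes);
    }
}
```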
@Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index c5d6780e84685..8dca74109b2cc 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; +import org.elasticsearch.index.mapper.BlockLoader; import java.util.List; @@ -24,16 +25,9 @@ * or dense data. A Block can represent either single or multi valued data. A Block that represents * dense single-valued data can be viewed as a {@link Vector}. * - * TODO: update comment - *
    All Blocks share the same set of data retrieval methods, but actual concrete implementations - * effectively support a subset of these, throwing {@code UnsupportedOperationException} where a - * particular data retrieval method is not supported. For example, a Block of primitive longs may - * not support retrieval as an integer, {code getInt}. This greatly simplifies Block usage and - * avoids cumbersome use-site casting. - * *
    Blocks are immutable and can be passed between threads. */ -public interface Block extends Accountable, NamedWriteable, Releasable { +public interface Block extends Accountable, BlockLoader.Block, NamedWriteable, Releasable { /** * {@return an efficient dense single-value view of this block}. @@ -146,12 +140,19 @@ default boolean mvSortedAscending() { /** * {@return a constant null block with the given number of positions, using the non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstantNullBlock} */ // Eventually, this should use the GLOBAL breaking instance + @Deprecated static Block constantNullBlock(int positions) { return constantNullBlock(positions, BlockFactory.getNonBreakingInstance()); } + /** + * {@return a constant null block with the given number of positions}. + * @deprecated use {@link BlockFactory#newConstantNullBlock} + */ + @Deprecated static Block constantNullBlock(int positions, BlockFactory blockFactory) { return blockFactory.newConstantNullBlock(positions); } @@ -160,7 +161,7 @@ static Block constantNullBlock(int positions, BlockFactory blockFactory) { * Builds {@link Block}s. Typically, you use one of its direct subinterfaces like {@link IntBlock.Builder}. * This is {@link Releasable} and should be released after building the block or if building the block fails. */ - interface Builder extends Releasable { + interface Builder extends BlockLoader.Builder, Releasable { /** * Appends a null value to the block. diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index ad5dfbf298200..d27f872881460 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -109,7 +109,7 @@ public BooleanBlock.Builder newBooleanBlockBuilder(int estimatedSize) { return new BooleanBlockBuilder(estimatedSize, this); } - BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { + public BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { return new BooleanVectorFixedBuilder(size, this); } @@ -172,7 +172,7 @@ public IntVector.Builder newIntVectorBuilder(int estimatedSize) { return new IntVectorBuilder(estimatedSize, this); } - IntVector.FixedBuilder newIntVectorFixedBuilder(int size) { + public IntVector.FixedBuilder newIntVectorFixedBuilder(int size) { return new IntVectorFixedBuilder(size, this); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 56a4dc249388f..89b40d6e46a14 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -161,6 +161,7 @@ public static Block[] fromList(BlockFactory blockFactory, List<List<Object>> list public static Block deepCopyOf(Block block, BlockFactory blockFactory) { try (Block.Builder builder = block.elementType().newBlockBuilder(block.getPositionCount(), blockFactory)) { builder.copyFrom(block, 0, block.getPositionCount()); + builder.mvOrdering(block.mvOrdering()); return builder.build(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index dba0ced86e60e..9437bdd35e21f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -18,7 +19,7 @@ /** * Block implementation representing a constant null value. */ -public final class ConstantNullBlock extends AbstractBlock { +public final class ConstantNullBlock extends AbstractBlock implements BooleanBlock, IntBlock, LongBlock, DoubleBlock, BytesRefBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantNullBlock.class); @@ -32,7 +33,7 @@ public final class ConstantNullBlock extends AbstractBlock { } @Override - public Vector asVector() { + public ConstantNullVector asVector() { return null; } @@ -67,8 +68,8 @@ public ElementType elementType() { } @Override - public Block filter(int... positions) { - return blockFactory.newConstantNullBlock(positions.length); + public ConstantNullBlock filter(int... positions) { + return (ConstantNullBlock) blockFactory.newConstantNullBlock(positions.length); } public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -182,7 +183,12 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { - throw new UnsupportedOperationException(); + /* + * This is called when copying but otherwise doesn't do + * anything because there aren't multivalue fields in a + * block containing only nulls. + */ + return this; } @Override @@ -199,4 +205,34 @@ public void close() { closed = true; } } + + @Override + public boolean getBoolean(int valueIndex) { + assert false : "null block"; + throw new UnsupportedOperationException("null block"); + } + + @Override + public BytesRef getBytesRef(int valueIndex, BytesRef dest) { + assert false : "null block"; + throw new UnsupportedOperationException("null block"); + } + + @Override + public double getDouble(int valueIndex) { + assert false : "null block"; + throw new UnsupportedOperationException("null block"); + } + + @Override + public int getInt(int valueIndex) { + assert false : "null block"; + throw new UnsupportedOperationException("null block"); + } + + @Override + public long getLong(int valueIndex) { + assert false : "null block"; + throw new UnsupportedOperationException("null block"); + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java new file mode 100644 index 0000000000000..ebe1aeda24412 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullVector.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
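The typed getters that ConstantNullBlock gains above all use the same defensive shape: `assert false` so misuse fails immediately in tests run with `-ea`, plus an `UnsupportedOperationException` so production (where assertions are disabled) still fails loudly instead of returning garbage. A compact illustration of the pattern outside the Block hierarchy; the class is invented for the example.

```java
final class NullOnlyColumn {
    /** Every position in this column is null, so typed access is always a caller bug. */
    boolean isNull(int position) {
        return true;
    }

    long getLong(int valueIndex) {
        assert false : "null block";                           // trips fast in tests run with -ea
        throw new UnsupportedOperationException("null block"); // still fails hard in production
    }
}
```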
+ */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; + +/** + * This vector is never instantiated. This class serves as a type holder for {@link ConstantNullBlock#asVector()}. + */ +public final class ConstantNullVector extends AbstractVector implements BooleanVector, IntVector, LongVector, DoubleVector, BytesRefVector { + + private ConstantNullVector(int positionCount, BlockFactory blockFactory) { + super(positionCount, blockFactory); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public ConstantNullBlock asBlock() { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public ConstantNullVector filter(int... positions) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public boolean getBoolean(int position) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public BytesRef getBytesRef(int position, BytesRef dest) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public double getDouble(int position) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public int getInt(int position) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public long getLong(int position) { + assert false : "null vector"; + throw new UnsupportedOperationException("null vector"); + } + + @Override + public ElementType elementType() { + return ElementType.NULL; + } + + @Override + public boolean isConstant() { + return true; + } + + @Override + public long ramBytesUsed() { + return 0; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index ccd740bc91ba9..ed7e317bfc4c7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -145,7 +145,13 @@ public Block.Builder appendAllValuesToCurrentPosition(Block block) { @Override public Block.Builder mvOrdering(MvOrdering mvOrdering) { - throw new UnsupportedOperationException("doc blocks only contain one value per position"); + /* + * This is called when copying but otherwise doesn't do + * anything because there aren't multivalue fields in a + * block containing doc references. Every position can + * only reference one doc. + */ + return this; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java new file mode 100644 index 0000000000000..703d882b91029 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilder.java @@ -0,0 +1,147 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.index.mapper.BlockLoader; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; + +public class SingletonOrdinalsBuilder implements BlockLoader.SingletonOrdinalsBuilder, Releasable, Block.Builder { + private final BlockFactory blockFactory; + private final SortedDocValues docValues; + private int[] ords; + private int count; + + public SingletonOrdinalsBuilder(BlockFactory blockFactory, SortedDocValues docValues, int count) { + this.blockFactory = blockFactory; + this.docValues = docValues; + blockFactory.adjustBreaker(ordsSize(count), false); + this.ords = new int[count]; + } + + @Override + public SingletonOrdinalsBuilder appendNull() { + ords[count++] = -1; // real ords can't be < 0, so we use -1 as null + return this; + } + + @Override + public SingletonOrdinalsBuilder appendOrd(int value) { + ords[count++] = value; + return this; + } + + int[] ords() { + return ords; + } + + @Override + public SingletonOrdinalsBuilder beginPositionEntry() { + throw new UnsupportedOperationException("should only have one value per doc"); + } + + @Override + public SingletonOrdinalsBuilder endPositionEntry() { + throw new UnsupportedOperationException("should only have one value per doc"); + } + + @Override + public BytesRefBlock build() { + try { + long breakerSize = ordsSize(ords.length); + // Increment breaker for sorted ords. + blockFactory.adjustBreaker(breakerSize, false); + try { + int[] sortedOrds = ords.clone(); + Arrays.sort(sortedOrds); + int uniqueCount = compactToUnique(sortedOrds); + + try (BreakingBytesRefBuilder copies = new BreakingBytesRefBuilder(blockFactory.breaker(), "ords")) { + long offsetsAndLength = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (uniqueCount + 1) * Integer.BYTES; + blockFactory.adjustBreaker(offsetsAndLength, false); + breakerSize += offsetsAndLength; + int[] offsets = new int[uniqueCount + 1]; + for (int o = 0; o < uniqueCount; o++) { + BytesRef v = docValues.lookupOrd(sortedOrds[o]); + offsets[o] = copies.length(); + copies.append(v); + } + offsets[uniqueCount] = copies.length(); + + /* + * It'd be better if BytesRefBlock could run off of a deduplicated list of + * blocks. It can't at the moment. So we copy many times. 
+ */ + BytesRef scratch = new BytesRef(); + scratch.bytes = copies.bytes(); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(ords.length)) { + for (int i = 0; i < ords.length; i++) { + if (ords[i] == -1) { + builder.appendNull(); + continue; + } + int o = Arrays.binarySearch(sortedOrds, 0, uniqueCount, ords[i]); + assert 0 <= o && o < uniqueCount; + scratch.offset = offsets[o]; + scratch.length = offsets[o + 1] - scratch.offset; + builder.appendBytesRef(scratch); + } + return builder.build(); + } + } + } finally { + blockFactory.adjustBreaker(-breakerSize, false); + } + } catch (IOException e) { + throw new UncheckedIOException("error resolving ordinals", e); + } + } + + @Override + public void close() { + blockFactory.adjustBreaker(-ordsSize(ords.length), false); + } + + @Override + public Block.Builder appendAllValuesToCurrentPosition(Block block) { + throw new UnsupportedOperationException(); + } + + @Override + public Block.Builder copyFrom(Block block, int beginInclusive, int endExclusive) { + throw new UnsupportedOperationException(); + } + + @Override + public Block.Builder mvOrdering(Block.MvOrdering mvOrdering) { + throw new UnsupportedOperationException(); + } + + private static long ordsSize(int ordsCount) { + return RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + ordsCount * Integer.BYTES; + } + + static int compactToUnique(int[] sortedOrds) { + Arrays.sort(sortedOrds); + int uniqueSize = 0; + int prev = -1; + for (int i = 0; i < sortedOrds.length; i++) { + if (sortedOrds[i] != prev) { + sortedOrds[uniqueSize++] = prev = sortedOrds[i]; + } + } + return uniqueSize; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 1dac4f1783e44..2ff537016459c 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -14,6 +14,7 @@ $endif$ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.mapper.BlockLoader; import java.io.IOException; @@ -21,7 +22,7 @@ import java.io.IOException; * Block that stores $type$ values. * This class is generated. Do not edit it. */ -public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock { +public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock, ConstantNullBlock { $if(BytesRef)$ BytesRef NULL_VALUE = new BytesRef(); @@ -202,31 +203,52 @@ $endif$ return result; } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newBlockBuilder(int estimatedSize) { return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a builder. 
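compactToUnique above is easy to trace by hand: the array is sorted, duplicates are squashed in place, and because `prev` starts at -1 the -1 sentinel that appendNull() writes is dropped for free. A small runnable check of that behavior; the demo class and its input are made up for illustration.

```java
import java.util.Arrays;

final class CompactToUniqueDemo {
    /** Copy of the helper above: sorts, then compacts duplicates in place. */
    static int compactToUnique(int[] sortedOrds) {
        Arrays.sort(sortedOrds);
        int uniqueSize = 0;
        int prev = -1; // also skips the -1 "null" sentinel ords
        for (int i = 0; i < sortedOrds.length; i++) {
            if (sortedOrds[i] != prev) {
                sortedOrds[uniqueSize++] = prev = sortedOrds[i];
            }
        }
        return uniqueSize;
    }

    public static void main(String[] args) {
        int[] ords = { 3, 1, 3, -1, 1 };  // -1 marks a null position
        int unique = compactToUnique(ords);
        // Prefix now holds the distinct real ords: prints "2 -> [1, 3]"
        System.out.println(unique + " -> " + Arrays.toString(Arrays.copyOf(ords, unique)));
    }
}
```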
+ * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} + */ + @Deprecated static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.new$Type$BlockBuilder(estimatedSize); } - /** Returns a block using the {@link BlockFactory#getNonBreakingInstance block factory}. */ + /** + * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. + * @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static $Type$Block newConstantBlockWith($type$ value, int positions) { return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); } + /** + * Returns a constant block. + * @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} + */ + @Deprecated static $Type$Block newConstantBlockWith($type$ value, int positions, BlockFactory blockFactory) { return blockFactory.newConstant$Type$BlockWith(value, positions); } - sealed interface Builder extends Block.Builder permits $Type$BlockBuilder { - + /** + * Builder for {@link $Type$Block} + */ + sealed interface Builder extends Block.Builder, BlockLoader.$Type$Builder permits $Type$BlockBuilder { /** * Appends a $type$ to the current entry. */ + @Override Builder append$Type$($type$ value); /** @@ -250,12 +272,11 @@ $endif$ @Override Builder mvOrdering(Block.MvOrdering mvOrdering); - // TODO boolean containsMvDups(); - /** * Appends the all values of the given block into a the current position * in this builder. */ + @Override Builder appendAllValuesToCurrentPosition(Block block); /** diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 90fd30f8b7e64..6ec41ccdc6ab9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -20,13 +20,15 @@ import java.io.IOException; * This class is generated. Do not edit it. */ $if(BytesRef)$ -public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector { +public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, ConstantNullVector { $elseif(boolean)$ -public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector { +public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector, + ConstantNullVector { $elseif(double)$ -public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector { +public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector, + ConstantNullVector { $else$ -public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector { +public sealed interface $Type$Vector extends Vector permits Constant$Type$Vector, $Type$ArrayVector, $Type$BigArrayVector, ConstantNullVector { $endif$ $if(BytesRef)$ @@ -141,8 +143,12 @@ $endif$ } } - /** Returns a builder using the {@link BlockFactory#getNonBreakingInstance block factory}. 
*/ + /** + * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. + * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} + */ // Eventually, we want to remove this entirely, always passing an explicit BlockFactory + @Deprecated static Builder newVectorBuilder(int estimatedSize) { return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); } @@ -150,13 +156,16 @@ $endif$ $if(BytesRef)$ /** * Creates a builder that grows as needed. + * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} */ $else$ /** * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} */ $endif$ + @Deprecated static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.new$Type$VectorBuilder(estimatedSize); } @@ -166,7 +175,9 @@ $else$ /** * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} * if you know the size up front because it's faster. + * @deprecated use {@link BlockFactory#new$Type$VectorFixedBuilder} */ + @Deprecated static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { return blockFactory.new$Type$VectorFixedBuilder(size); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java deleted file mode 100644 index 28a9359497393..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockDocValuesReader.java +++ /dev/null @@ -1,698 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.SortedNumericDocValues; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.NumericDoubleValues; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; -import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; - -import java.io.IOException; - -/** - * A reader that supports reading doc-values from a Lucene segment in Block fashion. - */ -public abstract class BlockDocValuesReader { - - protected final Thread creationThread; - - public BlockDocValuesReader() { - this.creationThread = Thread.currentThread(); - } - - /** - * Returns the current doc that this reader is on. 
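The X-Vector template changes above deprecate the static newVectorBuilder/newVectorFixedBuilder entry points in favor of calls on an explicit BlockFactory, and the retained javadoc says to prefer the fixed builder when the size is known. A before/after sketch of a call site under those assumptions; whether FixedBuilder is releasable at this commit isn't shown in the diff, so release handling on the fixed path is elided.

```java
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntVector;

final class BuilderMigration {
    // Before: deprecated static entry point, implicitly the non-breaking factory.
    @SuppressWarnings("deprecation")
    static IntVector before(int size) {
        try (IntVector.Builder b = IntVector.newVectorBuilder(size)) {
            for (int i = 0; i < size; i++) {
                b.appendInt(i);
            }
            return b.build();
        }
    }

    // After: explicit, breaker-aware factory; fixed builder because the exact
    // size is known up front (the faster option per the javadoc above).
    static IntVector after(BlockFactory blockFactory, int size) {
        IntVector.FixedBuilder b = blockFactory.newIntVectorFixedBuilder(size);
        for (int i = 0; i < size; i++) {
            b.appendInt(i);
        }
        return b.build();
    }
}
```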
- */ - public abstract int docID(); - - /** - * The {@link Block.Builder} for data of this type. - */ - public abstract Block.Builder builder(int positionCount); - - /** - * Reads the values of the given documents specified in the input block - */ - public abstract Block readValues(IntVector docs) throws IOException; - - /** - * Reads the values of the given document into the builder - */ - public abstract void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException; - - /** - * Checks if the reader can be used to read a range documents starting with the given docID by the current thread. - */ - public static boolean canReuse(BlockDocValuesReader reader, int startingDocID) { - return reader != null && reader.creationThread == Thread.currentThread() && reader.docID() <= startingDocID; - } - - public static BlockDocValuesReader createBlockReader( - ValuesSource valuesSource, - ValuesSourceType valuesSourceType, - ElementType elementType, - LeafReaderContext leafReaderContext - ) throws IOException { - if (valuesSourceType instanceof UnsupportedValueSourceType) { - final UnsupportedValueSource bytesVS = (UnsupportedValueSource) valuesSource; - final SortedBinaryDocValues bytesValues = bytesVS.bytesValues(leafReaderContext); - return new BytesValuesReader(bytesValues); - } - if (CoreValuesSourceType.NUMERIC.equals(valuesSourceType) || CoreValuesSourceType.DATE.equals(valuesSourceType)) { - ValuesSource.Numeric numericVS = (ValuesSource.Numeric) valuesSource; - if (numericVS.isFloatingPoint()) { - if (elementType != ElementType.DOUBLE) { - throw new UnsupportedOperationException("can't extract [" + elementType + "] from floating point fields"); - } - final SortedNumericDoubleValues doubleValues = numericVS.doubleValues(leafReaderContext); - final NumericDoubleValues singleton = FieldData.unwrapSingleton(doubleValues); - if (singleton != null) { - return new DoubleSingletonValuesReader(singleton); - } - return new DoubleValuesReader(doubleValues); - } else { - final SortedNumericDocValues longValues = numericVS.longValues(leafReaderContext); - final NumericDocValues singleton = DocValues.unwrapSingleton(longValues); - if (singleton != null) { - return switch (elementType) { - case LONG -> new LongSingletonValuesReader(singleton); - case INT -> new IntSingletonValuesReader(singleton); - default -> throw new UnsupportedOperationException("can't extract [" + elementType + "] from integer fields"); - }; - } - return switch (elementType) { - case LONG -> new LongValuesReader(longValues); - case INT -> new IntValuesReader(longValues); - default -> throw new UnsupportedOperationException("can't extract [" + elementType + "] from integer fields"); - }; - } - } - if (CoreValuesSourceType.KEYWORD.equals(valuesSourceType) || CoreValuesSourceType.IP.equals(valuesSourceType)) { - if (elementType != ElementType.BYTES_REF) { - throw new UnsupportedOperationException("can't extract [" + elementType + "] from keywords"); - } - final ValuesSource.Bytes bytesVS = (ValuesSource.Bytes) valuesSource; - final SortedBinaryDocValues bytesValues = bytesVS.bytesValues(leafReaderContext); - return new BytesValuesReader(bytesValues); - } - if (CoreValuesSourceType.BOOLEAN.equals(valuesSourceType)) { - if (elementType != ElementType.BOOLEAN) { - throw new UnsupportedOperationException("can't extract [" + elementType + "] from booleans"); - } - ValuesSource.Numeric numericVS = (ValuesSource.Numeric) valuesSource; - final SortedNumericDocValues longValues = numericVS.longValues(leafReaderContext); - final 
NumericDocValues singleton = DocValues.unwrapSingleton(longValues); - if (singleton != null) { - return new BooleanSingletonValuesReader(singleton); - } - return new BooleanValuesReader(longValues); - } - if (valuesSourceType instanceof NullValueSourceType) { - return new NullValuesReader(); - } - throw new IllegalArgumentException("Field type [" + valuesSourceType.typeName() + "] is not supported"); - } - - private static class LongSingletonValuesReader extends BlockDocValuesReader { - private final NumericDocValues numericDocValues; - - LongSingletonValuesReader(NumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public LongBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - int lastDoc = -1; - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < lastDoc) { - throw new IllegalStateException("docs within same block must be in order"); - } - if (numericDocValues.advanceExact(doc)) { - blockBuilder.appendLong(numericDocValues.longValue()); - } else { - blockBuilder.appendNull(); - } - lastDoc = doc; - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - LongBlock.Builder blockBuilder = (LongBlock.Builder) builder; - if (numericDocValues.advanceExact(docId)) { - blockBuilder.appendLong(numericDocValues.longValue()); - } else { - blockBuilder.appendNull(); - } - } - - @Override - public int docID() { - return numericDocValues.docID(); - } - - @Override - public String toString() { - return "LongSingletonValuesReader"; - } - } - - private static class LongValuesReader extends BlockDocValuesReader { - private final SortedNumericDocValues numericDocValues; - private int docID = -1; - - LongValuesReader(SortedNumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public LongBlock.Builder builder(int positionCount) { - return LongBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public LongBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < this.docID) { - throw new IllegalStateException("docs within same block must be in order"); - } - read(doc, blockBuilder); - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - read(docId, (LongBlock.Builder) builder); - } - - private void read(int doc, LongBlock.Builder builder) throws IOException { - this.docID = doc; - if (false == numericDocValues.advanceExact(doc)) { - builder.appendNull(); - return; - } - int count = numericDocValues.docValueCount(); - if (count == 1) { - builder.appendLong(numericDocValues.nextValue()); - return; - } - builder.beginPositionEntry(); - for (int v = 0; v < count; v++) { - builder.appendLong(numericDocValues.nextValue()); - } - builder.endPositionEntry(); - } - - @Override - public int docID() { - 
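The deleted createBlockReader above picks its reader with Lucene's singleton trick: DocValues.unwrapSingleton (and FieldData.unwrapSingleton for doubles) returns a plain single-valued view when every document has at most one value, and null otherwise. A minimal sketch of that dispatch; the describe method is invented for illustration.

```java
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;

final class SingletonDispatch {
    /**
     * Mirrors the reader selection above: if the sorted numeric doc values
     * are really single-valued, Lucene hands back the cheaper
     * NumericDocValues view; otherwise the multi-value API must be used.
     */
    static String describe(SortedNumericDocValues values) {
        NumericDocValues singleton = DocValues.unwrapSingleton(values);
        return singleton != null ? "singleton fast path" : "multi-value path";
    }
}
```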
// There is a .docID on the numericDocValues but it is often not implemented. - return docID; - } - - @Override - public String toString() { - return "LongValuesReader"; - } - } - - private static class IntSingletonValuesReader extends BlockDocValuesReader { - private final NumericDocValues numericDocValues; - - IntSingletonValuesReader(NumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public IntBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - int lastDoc = -1; - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < lastDoc) { - throw new IllegalStateException("docs within same block must be in order"); - } - if (numericDocValues.advanceExact(doc)) { - blockBuilder.appendInt(Math.toIntExact(numericDocValues.longValue())); - } else { - blockBuilder.appendNull(); - } - lastDoc = doc; - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - IntBlock.Builder blockBuilder = (IntBlock.Builder) builder; - if (numericDocValues.advanceExact(docId)) { - blockBuilder.appendInt(Math.toIntExact(numericDocValues.longValue())); - } else { - blockBuilder.appendNull(); - } - } - - @Override - public int docID() { - return numericDocValues.docID(); - } - - @Override - public String toString() { - return "LongSingletonValuesReader"; - } - } - - private static class IntValuesReader extends BlockDocValuesReader { - private final SortedNumericDocValues numericDocValues; - private int docID = -1; - - IntValuesReader(SortedNumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public IntBlock.Builder builder(int positionCount) { - return IntBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public IntBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < this.docID) { - // TODO this may not be true after sorting many docs in a single segment. - throw new IllegalStateException("docs within same block must be in order"); - } - read(doc, blockBuilder); - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - read(docId, (IntBlock.Builder) builder); - } - - private void read(int doc, IntBlock.Builder builder) throws IOException { - this.docID = doc; - if (false == numericDocValues.advanceExact(doc)) { - builder.appendNull(); - return; - } - int count = numericDocValues.docValueCount(); - if (count == 1) { - builder.appendInt(Math.toIntExact(numericDocValues.nextValue())); - return; - } - builder.beginPositionEntry(); - for (int v = 0; v < count; v++) { - builder.appendInt(Math.toIntExact(numericDocValues.nextValue())); - } - builder.endPositionEntry(); - } - - @Override - public int docID() { - // There is a .docID on on the numericDocValues but it is often not implemented. 
- return docID; - } - - @Override - public String toString() { - return "LongValuesReader"; - } - } - - private static class DoubleSingletonValuesReader extends BlockDocValuesReader { - private final NumericDoubleValues numericDocValues; - private int docID = -1; - - DoubleSingletonValuesReader(NumericDoubleValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public DoubleBlock.Builder builder(int positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public DoubleBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - int lastDoc = -1; - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < lastDoc) { - throw new IllegalStateException("docs within same block must be in order"); - } - if (numericDocValues.advanceExact(doc)) { - blockBuilder.appendDouble(numericDocValues.doubleValue()); - } else { - blockBuilder.appendNull(); - } - lastDoc = doc; - this.docID = doc; - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - this.docID = docId; - DoubleBlock.Builder blockBuilder = (DoubleBlock.Builder) builder; - if (numericDocValues.advanceExact(this.docID)) { - blockBuilder.appendDouble(numericDocValues.doubleValue()); - } else { - blockBuilder.appendNull(); - } - } - - @Override - public int docID() { - return docID; - } - - @Override - public String toString() { - return "DoubleSingletonValuesReader"; - } - } - - private static class DoubleValuesReader extends BlockDocValuesReader { - private final SortedNumericDoubleValues numericDocValues; - private int docID = -1; - - DoubleValuesReader(SortedNumericDoubleValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public DoubleBlock.Builder builder(int positionCount) { - return DoubleBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public DoubleBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < this.docID) { - throw new IllegalStateException("docs within same block must be in order"); - } - read(doc, blockBuilder); - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - read(docId, (DoubleBlock.Builder) builder); - } - - private void read(int doc, DoubleBlock.Builder builder) throws IOException { - this.docID = doc; - if (false == numericDocValues.advanceExact(doc)) { - builder.appendNull(); - return; - } - int count = numericDocValues.docValueCount(); - if (count == 1) { - builder.appendDouble(numericDocValues.nextValue()); - return; - } - builder.beginPositionEntry(); - for (int v = 0; v < count; v++) { - builder.appendDouble(numericDocValues.nextValue()); - } - builder.endPositionEntry(); - } - - @Override - public int docID() { - return docID; - } - - @Override - public String toString() { - return "DoubleValuesReader"; - } - } - - private static class BytesValuesReader extends BlockDocValuesReader { - private final 
SortedBinaryDocValues binaryDV; - private int docID = -1; - - BytesValuesReader(SortedBinaryDocValues binaryDV) { - this.binaryDV = binaryDV; - } - - @Override - public BytesRefBlock.Builder builder(int positionCount) { - return BytesRefBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public BytesRefBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - for (int i = 0; i < docs.getPositionCount(); i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < this.docID) { - throw new IllegalStateException("docs within same block must be in order"); - } - read(doc, blockBuilder); - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - read(docId, (BytesRefBlock.Builder) builder); - } - - private void read(int doc, BytesRefBlock.Builder builder) throws IOException { - this.docID = doc; - if (false == binaryDV.advanceExact(doc)) { - builder.appendNull(); - return; - } - int count = binaryDV.docValueCount(); - if (count == 1) { - builder.appendBytesRef(binaryDV.nextValue()); - return; - } - builder.beginPositionEntry(); - for (int v = 0; v < count; v++) { - builder.appendBytesRef(binaryDV.nextValue()); - } - builder.endPositionEntry(); - } - - @Override - public int docID() { - return docID; - } - - @Override - public String toString() { - return "BytesValuesReader"; - } - } - - private static class BooleanSingletonValuesReader extends BlockDocValuesReader { - private final NumericDocValues numericDocValues; - - BooleanSingletonValuesReader(NumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public BooleanBlock.Builder builder(int positionCount) { - return BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public BooleanBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - int lastDoc = -1; - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < lastDoc) { - throw new IllegalStateException("docs within same block must be in order"); - } - if (numericDocValues.advanceExact(doc)) { - blockBuilder.appendBoolean(numericDocValues.longValue() != 0); - } else { - blockBuilder.appendNull(); - } - lastDoc = doc; - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - BooleanBlock.Builder blockBuilder = (BooleanBlock.Builder) builder; - if (numericDocValues.advanceExact(docId)) { - blockBuilder.appendBoolean(numericDocValues.longValue() != 0); - } else { - blockBuilder.appendNull(); - } - } - - @Override - public int docID() { - return numericDocValues.docID(); - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } - } - - private static class BooleanValuesReader extends BlockDocValuesReader { - private final SortedNumericDocValues numericDocValues; - private int docID = -1; - - BooleanValuesReader(SortedNumericDocValues numericDocValues) { - this.numericDocValues = numericDocValues; - } - - @Override - public BooleanBlock.Builder builder(int positionCount) { - return 
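All of the deleted multi-value readers share one append shape, and it survives in the Block.Builder API shown elsewhere in this PR: one value is appended directly, while several values for the same document go between beginPositionEntry and endPositionEntry so they occupy a single position. A sketch of that shape; appendDoc and its argument layout are invented for illustration.

```java
import org.elasticsearch.compute.data.LongBlock;

final class MultivaluePositions {
    /**
     * The per-document append pattern used by the readers above: a single
     * value lands directly in a position, several values for one document
     * are wrapped in begin/endPositionEntry so they share one position.
     */
    static void appendDoc(LongBlock.Builder builder, long[] valuesForDoc) {
        if (valuesForDoc.length == 0) {
            builder.appendNull();
        } else if (valuesForDoc.length == 1) {
            builder.appendLong(valuesForDoc[0]);
        } else {
            builder.beginPositionEntry();
            for (long v : valuesForDoc) {
                builder.appendLong(v);
            }
            builder.endPositionEntry();
        }
    }
}
```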
BooleanBlock.newBlockBuilder(positionCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); - } - - @Override - public BooleanBlock readValues(IntVector docs) throws IOException { - final int positionCount = docs.getPositionCount(); - var blockBuilder = builder(positionCount); - for (int i = 0; i < positionCount; i++) { - int doc = docs.getInt(i); - // docs within same block must be in order - if (doc < this.docID) { - throw new IllegalStateException("docs within same block must be in order"); - } - read(doc, blockBuilder); - } - return blockBuilder.build(); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) throws IOException { - read(docId, (BooleanBlock.Builder) builder); - } - - private void read(int doc, BooleanBlock.Builder builder) throws IOException { - this.docID = doc; - if (false == numericDocValues.advanceExact(doc)) { - builder.appendNull(); - return; - } - int count = numericDocValues.docValueCount(); - if (count == 1) { - builder.appendBoolean(numericDocValues.nextValue() != 0); - return; - } - builder.beginPositionEntry(); - for (int v = 0; v < count; v++) { - builder.appendBoolean(numericDocValues.nextValue() != 0); - } - builder.endPositionEntry(); - } - - @Override - public int docID() { - // There is a .docID on the numericDocValues but it is often not implemented. - return docID; - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } - } - - private static class NullValuesReader extends BlockDocValuesReader { - private int docID = -1; - - @Override - public Block.Builder builder(int positionCount) { - return ElementType.NULL.newBlockBuilder(positionCount); - } - - @Override - public Block readValues(IntVector docs) throws IOException { - return Block.constantNullBlock(docs.getPositionCount()); - } - - @Override - public void readValuesFromSingleDoc(int docId, Block.Builder builder) { - this.docID = docId; - builder.appendNull(); - } - - @Override - public int docID() { - return docID; - } - - @Override - public String toString() { - return getClass().getSimpleName(); - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java new file mode 100644 index 0000000000000..a0d08bc798fbb --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/BlockReaderFactories.java @@ -0,0 +1,108 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.lucene; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.SortedSetDocValues; +import org.elasticsearch.common.logging.HeaderWarning; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +/** + * Resolves *how* ESQL loads field values. 
+ */ +public final class BlockReaderFactories { + private BlockReaderFactories() {} + + /** + * Resolves *how* ESQL loads field values. + * @param searchContexts a search context per search index we're loading + * field from + * @param fieldName the name of the field to load + * @param asUnsupportedSource should the field be loaded as "unsupported"? + * These will always have {@code null} values + */ + public static List<BlockDocValuesReader.Factory> factories( + List<SearchContext> searchContexts, + String fieldName, + boolean asUnsupportedSource + ) { + List<BlockDocValuesReader.Factory> factories = new ArrayList<>(searchContexts.size()); + + for (SearchContext searchContext : searchContexts) { + SearchExecutionContext ctx = searchContext.getSearchExecutionContext(); + if (asUnsupportedSource) { + factories.add(loaderToFactory(ctx.getIndexReader(), BlockDocValuesReader.nulls())); + continue; + } + MappedFieldType fieldType = ctx.getFieldType(fieldName); + if (fieldType == null) { + // the field does not exist in this context + factories.add(loaderToFactory(ctx.getIndexReader(), BlockDocValuesReader.nulls())); + continue; + } + BlockLoader loader = fieldType.blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + return ctx.getFullyQualifiedIndex().getName(); + } + + @Override + public SearchLookup lookup() { + return ctx.lookup(); + } + + @Override + public Set<String> sourcePaths(String name) { + return ctx.sourcePath(name); + } + }); + if (loader == null) { + HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName); + factories.add(loaderToFactory(ctx.getIndexReader(), BlockDocValuesReader.nulls())); + continue; + } + factories.add(loaderToFactory(ctx.getIndexReader(), loader)); + } + + return factories; + } + + /** + * Converts a {@link BlockLoader}, something defined in core elasticsearch at + * the field level, into a {@link BlockDocValuesReader.Factory} which can be + * used inside ESQL. + */ + public static BlockDocValuesReader.Factory loaderToFactory(IndexReader reader, BlockLoader loader) { + return new BlockDocValuesReader.Factory() { + @Override + public BlockDocValuesReader build(int segment) throws IOException { + return loader.reader(reader.leaves().get(segment)); + } + + @Override + public boolean supportsOrdinals() { + return loader.supportsOrdinals(); + } + + @Override + public SortedSetDocValues ordinals(int segment) throws IOException { + return loader.ordinals(reader.leaves().get(segment)); + } + }; + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdValueSource.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdValueSource.java deleted file mode 100644 index 906d6a0932806..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdValueSource.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
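loaderToFactory above is a thin adapter: the BlockLoader is resolved once per field, and the factory then turns a segment ordinal into a per-segment reader on demand. A hypothetical call site showing the shape; readerForSegment and segmentOrd are illustrative names, not part of the PR.

```java
import java.io.IOException;

import org.apache.lucene.index.IndexReader;
import org.elasticsearch.compute.lucene.BlockReaderFactories;
import org.elasticsearch.index.mapper.BlockDocValuesReader;
import org.elasticsearch.index.mapper.BlockLoader;

final class PerSegmentReaders {
    /** One factory per (reader, field); one BlockDocValuesReader per segment. */
    static BlockDocValuesReader readerForSegment(IndexReader reader, BlockLoader loader, int segmentOrd) throws IOException {
        BlockDocValuesReader.Factory factory = BlockReaderFactories.loaderToFactory(reader, loader);
        return factory.build(segmentOrd); // resolves reader.leaves().get(segmentOrd) internally
    }
}
```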
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.search.aggregations.support.ValuesSource; - -public class IdValueSource extends ValuesSource.Bytes { - - private final IdFieldIndexFieldData indexFieldData; - - public IdValueSource(IdFieldIndexFieldData indexFieldData) { - this.indexFieldData = indexFieldData; - } - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext leafReaderContext) { - return indexFieldData.load(leafReaderContext).getBytesValues(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java index 7e0b5297b629b..75bd230638928 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneCountOperator.java @@ -13,11 +13,13 @@ import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -62,7 +64,7 @@ public Factory( @Override public SourceOperator get(DriverContext driverContext) { - return new LuceneCountOperator(sliceQueue, limit); + return new LuceneCountOperator(driverContext.blockFactory(), sliceQueue, limit); } @Override @@ -80,8 +82,8 @@ public String describe() { } } - public LuceneCountOperator(LuceneSliceQueue sliceQueue, int limit) { - super(PAGE_SIZE, sliceQueue); + public LuceneCountOperator(BlockFactory blockFactory, LuceneSliceQueue sliceQueue, int limit) { + super(blockFactory, PAGE_SIZE, sliceQueue); this.remainingDocs = limit; this.leafCollector = new LeafCollector() { @Override @@ -155,11 +157,17 @@ public Page getOutput() { // emit only one page if (remainingDocs <= 0 && pagesEmitted == 0) { pagesEmitted++; - page = new Page( - PAGE_SIZE, - LongBlock.newConstantBlockWith(totalHits, PAGE_SIZE), - BooleanBlock.newConstantBlockWith(true, PAGE_SIZE) - ); + LongBlock count = null; + BooleanBlock seen = null; + try { + count = LongBlock.newConstantBlockWith(totalHits, PAGE_SIZE, blockFactory); + seen = BooleanBlock.newConstantBlockWith(true, PAGE_SIZE, blockFactory); + page = new Page(PAGE_SIZE, count, seen); + } finally { + if (page == null) { + Releasables.closeExpectNoException(count, seen); + } + } } return page; } catch (IOException e) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index abb96446bb831..6536b08cd2419 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.logging.LogManager; @@ -37,6 +38,8 @@ public abstract class LuceneOperator extends SourceOperator { public static final int NO_LIMIT = Integer.MAX_VALUE; + protected final BlockFactory blockFactory; + private int processSlices; final int maxPageSize; private final LuceneSliceQueue sliceQueue; @@ -49,7 +52,8 @@ public abstract class LuceneOperator extends SourceOperator { int pagesEmitted; boolean doneCollecting; - public LuceneOperator(int maxPageSize, LuceneSliceQueue sliceQueue) { + public LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { + this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.sliceQueue = sliceQueue; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 0bbb6571dc4fd..7b2b276a619c6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -11,12 +11,14 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; @@ -61,7 +63,7 @@ public Factory( @Override public SourceOperator get(DriverContext driverContext) { - return new LuceneSourceOperator(maxPageSize, sliceQueue, limit); + return new LuceneSourceOperator(driverContext.blockFactory(), maxPageSize, sliceQueue, limit); } @Override @@ -89,11 +91,11 @@ public String describe() { } } - public LuceneSourceOperator(int maxPageSize, LuceneSliceQueue sliceQueue, int limit) { - super(maxPageSize, sliceQueue); + public LuceneSourceOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue, int limit) { + super(blockFactory, maxPageSize, sliceQueue); this.minPageSize = Math.max(1, maxPageSize / 2); this.remainingDocs = limit; - this.docsBuilder = IntVector.newVectorBuilder(Math.min(limit, maxPageSize)); + this.docsBuilder = IntVector.newVectorBuilder(Math.min(limit, maxPageSize), blockFactory); this.leafCollector = new LeafCollector() { @Override public void setScorer(Scorable scorer) { @@ -143,16 +145,20 @@ public Page getOutput() { Page page = null; if (currentPagePos >= minPageSize || remainingDocs <= 0 || scorer.isDone()) { pagesEmitted++; - page = new Page( - currentPagePos, - new DocVector( - IntBlock.newConstantBlockWith(scorer.shardIndex(), currentPagePos).asVector(), - IntBlock.newConstantBlockWith(scorer.leafReaderContext().ord, currentPagePos).asVector(), - docsBuilder.build(), - true - ).asBlock() - ); - docsBuilder = IntVector.newVectorBuilder(Math.min(remainingDocs, maxPageSize)); 
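LuceneSourceOperator.getOutput above uses the same ownership discipline as the count operator: blocks are built one by one, the Page takes ownership at the end, and if construction fails partway the finally block releases whatever already exists. Releasables.closeExpectNoException tolerates nulls, so the not-yet-built blocks need no special casing. A condensed sketch of that flow, assuming the constant-block factory methods shown elsewhere in this diff.

```java
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.core.Releasables;

final class PageAssembly {
    /**
     * Allocate two constant blocks and hand them to a Page; if anything
     * throws before the Page takes ownership, release what was built.
     */
    static Page twoConstantColumns(BlockFactory blockFactory, int positions, int shardIndex, int leafOrd) {
        IntBlock shard = null;
        IntBlock leaf = null;
        Page page = null;
        try {
            shard = blockFactory.newConstantIntBlockWith(shardIndex, positions);
            leaf = blockFactory.newConstantIntBlockWith(leafOrd, positions);
            page = new Page(positions, shard, leaf);
            return page;
        } finally {
            if (page == null) {
                Releasables.closeExpectNoException(shard, leaf); // nulls are skipped
            }
        }
    }
}
```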
+ IntBlock shard = null; + IntBlock leaf = null; + IntVector docs = null; + try { + shard = IntBlock.newConstantBlockWith(scorer.shardIndex(), currentPagePos, blockFactory); + leaf = IntBlock.newConstantBlockWith(scorer.leafReaderContext().ord, currentPagePos, blockFactory); + docs = docsBuilder.build(); + docsBuilder = IntVector.newVectorBuilder(Math.min(remainingDocs, maxPageSize), blockFactory); + page = new Page(currentPagePos, new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock()); + } finally { + if (page == null) { + Releasables.closeExpectNoException(shard, leaf, docs); + } + } currentPagePos = 0; } return page; @@ -161,6 +167,11 @@ public Page getOutput() { } } + @Override + public void close() { + docsBuilder.close(); + } + @Override protected void describe(StringBuilder sb) { sb.append(", remainingDocs=").append(remainingDocs); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 4c6bb50ce9f7f..4ce0af3bd0ffe 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -16,12 +16,14 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; import org.elasticsearch.common.Strings; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.core.Releasables; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.sort.SortBuilder; @@ -38,25 +40,6 @@ * Source operator that builds Pages out of the output of a TopFieldCollector (aka TopN) */ public final class LuceneTopNSourceOperator extends LuceneOperator { - /** - * Collected docs. {@code null} until we're {@link #emit(boolean)}. - */ - private ScoreDoc[] scoreDocs; - /** - * The offset in {@link #scoreDocs} of the next page. - */ - private int offset = 0; - - private PerShardCollector perShardCollector; - private final List<SortBuilder<?>> sorts; - private final int limit; - - public LuceneTopNSourceOperator(int maxPageSize, List<SortBuilder<?>> sorts, int limit, LuceneSliceQueue sliceQueue) { - super(maxPageSize, sliceQueue); - this.sorts = sorts; - this.limit = limit; - } - public static final class Factory implements LuceneOperator.Factory { private final int taskConcurrency; private final int maxPageSize; @@ -85,7 +68,7 @@ public Factory( @Override public SourceOperator get(DriverContext driverContext) { - return new LuceneTopNSourceOperator(maxPageSize, sorts, limit, sliceQueue); + return new LuceneTopNSourceOperator(driverContext.blockFactory(), maxPageSize, sorts, limit, sliceQueue); } @Override @@ -116,6 +99,31 @@ public String describe() { } } + /** + * Collected docs. {@code null} until we're {@link #emit(boolean)}. + */ + private ScoreDoc[] scoreDocs; + /** + * The offset in {@link #scoreDocs} of the next page.
+ */ + private int offset = 0; + + private PerShardCollector perShardCollector; + private final List<SortBuilder<?>> sorts; + private final int limit; + + public LuceneTopNSourceOperator( + BlockFactory blockFactory, + int maxPageSize, + List<SortBuilder<?>> sorts, + int limit, + LuceneSliceQueue sliceQueue + ) { + super(blockFactory, maxPageSize, sliceQueue); + this.sorts = sorts; + this.limit = limit; + } + @Override public boolean isFinished() { return doneCollecting && isEmitting() == false; @@ -187,29 +195,35 @@ private Page emit(boolean startEmitting) { return null; } int size = Math.min(maxPageSize, scoreDocs.length - offset); - IntVector.Builder currentSegmentBuilder = IntVector.newVectorBuilder(size); - IntVector.Builder currentDocsBuilder = IntVector.newVectorBuilder(size); + IntBlock shard = null; + IntVector segments = null; + IntVector docs = null; + Page page = null; + try ( + IntVector.Builder currentSegmentBuilder = IntVector.newVectorBuilder(size, blockFactory); + IntVector.Builder currentDocsBuilder = IntVector.newVectorBuilder(size, blockFactory) + ) { + int start = offset; + offset += size; + List<LeafReaderContext> leafContexts = perShardCollector.searchContext.searcher().getLeafContexts(); + for (int i = start; i < offset; i++) { + int doc = scoreDocs[i].doc; + int segment = ReaderUtil.subIndex(doc, leafContexts); + currentSegmentBuilder.appendInt(segment); + currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment + } - int start = offset; - offset += size; - List<LeafReaderContext> leafContexts = perShardCollector.searchContext.searcher().getLeafContexts(); - for (int i = start; i < offset; i++) { - int doc = scoreDocs[i].doc; - int segment = ReaderUtil.subIndex(doc, leafContexts); - currentSegmentBuilder.appendInt(segment); - currentDocsBuilder.appendInt(doc - leafContexts.get(segment).docBase); // the offset inside the segment + shard = IntBlock.newConstantBlockWith(perShardCollector.shardIndex, size, blockFactory); + segments = currentSegmentBuilder.build(); + docs = currentDocsBuilder.build(); + page = new Page(size, new DocVector(shard.asVector(), segments, docs, null).asBlock()); + } finally { + if (page == null) { + Releasables.close(shard, segments, docs); + } } - pagesEmitted++; - return new Page( - size, - new DocVector( - IntBlock.newConstantBlockWith(perShardCollector.shardIndex, size).asVector(), - currentSegmentBuilder.build(), - currentDocsBuilder.build(), - null - ).asBlock() - ); + return page; } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSource.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSource.java deleted file mode 100644 index fc9807b2e2410..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSource.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Rounding; -import org.elasticsearch.index.fielddata.DocValueBits; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.aggregations.support.ValuesSource; - -import java.io.IOException; -import java.util.function.Function; - -public class NullValueSource extends ValuesSource { - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - - return new SortedBinaryDocValues() { - @Override - public boolean advanceExact(int doc) throws IOException { - return true; - } - - @Override - public int docValueCount() { - return 1; - } - - @Override - public BytesRef nextValue() throws IOException { - return null; - } - }; - } - - @Override - public DocValueBits docsWithValue(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - - @Override - protected Function roundingPreparer(AggregationContext context) throws IOException { - throw new UnsupportedOperationException(); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSourceType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSourceType.java deleted file mode 100644 index fd354bd9e1a0b..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/NullValueSourceType.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.lucene; - -import org.elasticsearch.script.AggregationScript; -import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.support.AggregationContext; -import org.elasticsearch.search.aggregations.support.FieldContext; -import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; - -public class NullValueSourceType implements ValuesSourceType { - - @Override - public ValuesSource getEmpty() { - throw new UnsupportedOperationException(); - } - - @Override - public ValuesSource getScript(AggregationScript.LeafFactory script, ValueType scriptValueType) { - throw new UnsupportedOperationException(); - } - - @Override - public ValuesSource getField(FieldContext fieldContext, AggregationScript.LeafFactory script) { - throw new UnsupportedOperationException(); - } - - @Override - public ValuesSource replaceMissing( - ValuesSource valuesSource, - Object rawMissing, - DocValueFormat docValueFormat, - AggregationContext context - ) { - throw new UnsupportedOperationException(); - } - - @Override - public String typeName() { - return null; - } - -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/UnsupportedValueSourceType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/UnsupportedValueSourceType.java index ead3d9b46d6a5..18e1c49d15390 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/UnsupportedValueSourceType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/UnsupportedValueSourceType.java @@ -9,12 +9,13 @@ import org.elasticsearch.script.AggregationScript; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import java.util.function.LongSupplier; + // just a placeholder class for unsupported data types public class UnsupportedValueSourceType implements ValuesSourceType { @@ -44,7 +45,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { throw new UnsupportedOperationException(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSourceInfo.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSourceInfo.java deleted file mode 100644 index e4dffdfe72c4d..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSourceInfo.java +++ /dev/null @@ -1,15 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.IndexReader; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; - -public record ValueSourceInfo(ValuesSourceType type, ValuesSource source, ElementType elementType, IndexReader reader) {} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSources.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSources.java deleted file mode 100644 index 29a539b1e068e..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValueSources.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.logging.HeaderWarning; -import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBinaryIndexFieldData; -import org.elasticsearch.index.fielddata.StoredFieldSortedBinaryIndexFieldData; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.SourceValueFetcher; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; -import org.elasticsearch.search.aggregations.support.FieldContext; -import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchRequest; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; - -public final class ValueSources { - - public static final String MATCH_ONLY_TEXT = "match_only_text"; - - private ValueSources() {} - - public static List sources( - List searchContexts, - String fieldName, - boolean asUnsupportedSource, - ElementType elementType - ) { - List sources = new ArrayList<>(searchContexts.size()); - - for (SearchContext searchContext : searchContexts) { - // TODO: remove this workaround - // Create a separate SearchExecutionContext for each ValuesReader, as it seems that - // the synthetic source doesn't work properly with inter-segment or intra-segment parallelism. 
- ShardSearchRequest shardRequest = searchContext.request(); - SearchExecutionContext ctx = searchContext.readerContext() - .indexService() - .newSearchExecutionContext( - shardRequest.shardId().id(), - shardRequest.shardRequestIndex(), - searchContext.searcher(), - shardRequest::nowInMillis, - shardRequest.getClusterAlias(), - shardRequest.getRuntimeMappings() - ); - var fieldType = ctx.getFieldType(fieldName); - if (fieldType == null) { - sources.add(new ValueSourceInfo(new NullValueSourceType(), new NullValueSource(), elementType, ctx.getIndexReader())); - continue; // the field does not exist in this context - } - if (asUnsupportedSource) { - sources.add( - new ValueSourceInfo( - new UnsupportedValueSourceType(fieldType.typeName()), - new UnsupportedValueSource(null), - elementType, - ctx.getIndexReader() - ) - ); - HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName); - continue; - } - - if (fieldType.hasDocValues() == false) { - // MatchOnlyTextFieldMapper class lives in the mapper-extras module. We use string equality - // for the field type name to avoid adding a dependency to the module - if (fieldType instanceof KeywordFieldMapper.KeywordFieldType - || fieldType instanceof TextFieldMapper.TextFieldType tft && (tft.isSyntheticSource() == false || tft.isStored()) - || MATCH_ONLY_TEXT.equals(fieldType.typeName())) { - ValuesSource vs = textValueSource(ctx, fieldType); - sources.add(new ValueSourceInfo(CoreValuesSourceType.KEYWORD, vs, elementType, ctx.getIndexReader())); - continue; - } - - if (IdFieldMapper.NAME.equals(fieldType.name())) { - ValuesSource vs = new IdValueSource(new IdFieldIndexFieldData(CoreValuesSourceType.KEYWORD)); - sources.add(new ValueSourceInfo(CoreValuesSourceType.KEYWORD, vs, elementType, ctx.getIndexReader())); - continue; - } - } - - IndexFieldData fieldData; - try { - fieldData = ctx.getForField(fieldType, MappedFieldType.FielddataOperation.SEARCH); - } catch (IllegalArgumentException e) { - sources.add(unsupportedValueSource(elementType, ctx, fieldType, e)); - HeaderWarning.addWarning("Field [{}] cannot be retrieved, it is unsupported or not indexed; returning null", fieldName); - continue; - } - var fieldContext = new FieldContext(fieldName, fieldData, fieldType); - var vsType = fieldData.getValuesSourceType(); - var vs = vsType.getField(fieldContext, null); - sources.add(new ValueSourceInfo(vsType, vs, elementType, ctx.getIndexReader())); - } - - return sources; - } - - private static ValueSourceInfo unsupportedValueSource( - ElementType elementType, - SearchExecutionContext ctx, - MappedFieldType fieldType, - IllegalArgumentException e - ) { - return switch (elementType) { - case BYTES_REF -> new ValueSourceInfo( - new UnsupportedValueSourceType(fieldType.typeName()), - new UnsupportedValueSource(null), - elementType, - ctx.getIndexReader() - ); - case LONG, INT -> new ValueSourceInfo( - CoreValuesSourceType.NUMERIC, - ValuesSource.Numeric.EMPTY, - elementType, - ctx.getIndexReader() - ); - case BOOLEAN -> new ValueSourceInfo( - CoreValuesSourceType.BOOLEAN, - ValuesSource.Numeric.EMPTY, - elementType, - ctx.getIndexReader() - ); - case DOUBLE -> new ValueSourceInfo(CoreValuesSourceType.NUMERIC, new ValuesSource.Numeric() { - @Override - public boolean isFloatingPoint() { - return true; - } - - @Override - public SortedNumericDocValues longValues(LeafReaderContext context) { - return DocValues.emptySortedNumeric(); - } - - @Override - public SortedNumericDoubleValues 
doubleValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedNumericDoubles(); - } - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(); - } - }, elementType, ctx.getIndexReader()); - default -> throw e; - }; - } - - private static TextValueSource textValueSource(SearchExecutionContext ctx, MappedFieldType fieldType) { - if (fieldType.isStored()) { - IndexFieldData fieldData = new StoredFieldSortedBinaryIndexFieldData( - fieldType.name(), - CoreValuesSourceType.KEYWORD, - TextValueSource.TextDocValuesFieldWrapper::new - ) { - @Override - protected BytesRef storedToBytesRef(Object stored) { - return new BytesRef((String) stored); - } - }; - return new TextValueSource(fieldData); - } - - FieldDataContext fieldDataContext = new FieldDataContext( - ctx.getFullyQualifiedIndex().getName(), - () -> ctx.lookup().forkAndTrackFieldReferences(fieldType.name()), - ctx::sourcePath, - MappedFieldType.FielddataOperation.SEARCH - ); - IndexFieldData fieldData = new SourceValueFetcherSortedBinaryIndexFieldData.Builder( - fieldType.name(), - CoreValuesSourceType.KEYWORD, - SourceValueFetcher.toString(fieldDataContext.sourcePathsLookup().apply(fieldType.name())), - fieldDataContext.lookupSupplier().get(), - TextValueSource.TextDocValuesFieldWrapper::new - ).build(null, null); // Neither cache nor breakerService are used by SourceValueFetcherSortedBinaryIndexFieldData builder - return new TextValueSource(fieldData); - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java index 83fc902bd5077..61c1bd9730e02 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperator.java @@ -7,18 +7,24 @@ package org.elasticsearch.compute.lucene; -import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedDocValues; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.SingletonOrdinalsBuilder; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.xcontent.XContentBuilder; @@ -28,7 +34,6 @@ import java.util.Map; import java.util.Objects; import java.util.TreeMap; -import java.util.function.Supplier; /** * Operator that extracts doc_values from a Lucene index out of pages that have been produced by {@link 
LuceneSourceOperator} @@ -43,12 +48,12 @@ public class ValuesSourceReaderOperator extends AbstractPageMappingOperator { * @param docChannel the channel containing the shard, leaf/segment and doc id * @param field the lucene field being loaded */ - public record ValuesSourceReaderOperatorFactory(Supplier> sources, int docChannel, String field) + public record ValuesSourceReaderOperatorFactory(List sources, int docChannel, String field) implements OperatorFactory { @Override public Operator get(DriverContext driverContext) { - return new ValuesSourceReaderOperator(sources.get(), docChannel, field); + return new ValuesSourceReaderOperator(driverContext.blockFactory(), sources, docChannel, field); } @Override @@ -57,9 +62,14 @@ public String describe() { } } - private final List sources; + /** + * A list, one entry per shard, of factories for {@link BlockDocValuesReader}s + * which perform the actual reading. + */ + private final List factories; private final int docChannel; private final String field; + private final ComputeBlockLoaderFactory blockFactory; private BlockDocValuesReader lastReader; private int lastShard = -1; @@ -69,14 +79,20 @@ public String describe() { /** * Creates a new extractor - * @param sources the value source, type and index readers to use for extraction + * @param factories builds {@link BlockDocValuesReader} * @param docChannel the channel containing the shard, leaf/segment and doc id * @param field the lucene field being loaded */ - public ValuesSourceReaderOperator(List sources, int docChannel, String field) { - this.sources = sources; + public ValuesSourceReaderOperator( + BlockFactory blockFactory, + List factories, + int docChannel, + String field + ) { + this.factories = factories; this.docChannel = docChannel; this.field = field; + this.blockFactory = new ComputeBlockLoaderFactory(blockFactory); } @Override @@ -95,35 +111,48 @@ protected Page process(Page page) { private Block loadFromSingleLeaf(DocVector docVector) throws IOException { setupReader(docVector.shards().getInt(0), docVector.segments().getInt(0), docVector.docs().getInt(0)); - return lastReader.readValues(docVector.docs()); + return ((Block) lastReader.readValues(blockFactory, new BlockLoader.Docs() { + private final IntVector docs = docVector.docs(); + + @Override + public int count() { + return docs.getPositionCount(); + } + + @Override + public int get(int i) { + return docs.getInt(i); + } + })); } private Block loadFromManyLeaves(DocVector docVector) throws IOException { int[] forwards = docVector.shardSegmentDocMapForwards(); int doc = docVector.docs().getInt(forwards[0]); setupReader(docVector.shards().getInt(forwards[0]), docVector.segments().getInt(forwards[0]), doc); - Block.Builder builder = lastReader.builder(forwards.length); - lastReader.readValuesFromSingleDoc(doc, builder); - for (int i = 1; i < forwards.length; i++) { - int shard = docVector.shards().getInt(forwards[i]); - int segment = docVector.segments().getInt(forwards[i]); - doc = docVector.docs().getInt(forwards[i]); - if (segment != lastSegment || shard != lastShard) { - setupReader(shard, segment, doc); - } + try (BlockLoader.Builder builder = lastReader.builder(blockFactory, forwards.length)) { lastReader.readValuesFromSingleDoc(doc, builder); + for (int i = 1; i < forwards.length; i++) { + int shard = docVector.shards().getInt(forwards[i]); + int segment = docVector.segments().getInt(forwards[i]); + doc = docVector.docs().getInt(forwards[i]); + if (segment != lastSegment || shard != lastShard) { + setupReader(shard, 
segment, doc); + } + lastReader.readValuesFromSingleDoc(doc, builder); + } + try (Block orig = ((Block.Builder) builder).build()) { + return orig.filter(docVector.shardSegmentDocMapBackwards()); + } } - // TODO maybe it's better for downstream consumers if we perform a copy here. - return builder.build().filter(docVector.shardSegmentDocMapBackwards()); } private void setupReader(int shard, int segment, int doc) throws IOException { if (lastSegment == segment && lastShard == shard && BlockDocValuesReader.canReuse(lastReader, doc)) { return; } - var info = sources.get(shard); - LeafReaderContext leafReaderContext = info.reader().leaves().get(segment); - lastReader = BlockDocValuesReader.createBlockReader(info.source(), info.type(), info.elementType(), leafReaderContext); + + lastReader = factories.get(shard).build(segment); lastShard = shard; lastSegment = segment; readersBuilt.compute(lastReader.toString(), (k, v) -> v == null ? 1 : v + 1); @@ -203,4 +232,72 @@ public String toString() { return Strings.toString(this); } } + + private static class ComputeBlockLoaderFactory implements BlockLoader.BuilderFactory { + private final BlockFactory factory; + + private ComputeBlockLoaderFactory(BlockFactory factory) { + this.factory = factory; + } + + @Override + public BlockLoader.BooleanBuilder booleansFromDocValues(int expectedCount) { + return factory.newBooleanBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.BooleanBuilder booleans(int expectedCount) { + return factory.newBooleanBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefsFromDocValues(int expectedCount) { + return factory.newBytesRefBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.BytesRefBuilder bytesRefs(int expectedCount) { + return factory.newBytesRefBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.DoubleBuilder doublesFromDocValues(int expectedCount) { + return factory.newDoubleBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.DoubleBuilder doubles(int expectedCount) { + return factory.newDoubleBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.IntBuilder intsFromDocValues(int expectedCount) { + return factory.newIntBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.IntBuilder ints(int expectedCount) { + return factory.newIntBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.LongBuilder longsFromDocValues(int expectedCount) { + return factory.newLongBlockBuilder(expectedCount).mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); + } + + @Override + public BlockLoader.LongBuilder longs(int expectedCount) { + return factory.newLongBlockBuilder(expectedCount); + } + + @Override + public BlockLoader.Builder nulls(int expectedCount) { + return ElementType.NULL.newBlockBuilder(expectedCount, factory); + } + + @Override + public BlockLoader.SingletonOrdinalsBuilder singletonOrdinalsBuilder(SortedDocValues ordinals, int count) { + return new SingletonOrdinalsBuilder(factory, ordinals, count); + } + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 
ffeb7aac1f98c..65efdc4266b28 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -29,8 +29,7 @@ public Operator get(DriverContext driverContext) { @Override public String describe() { - // TODO ThrowingDriverContext blows up when combined with Concat - return "EvalOperator[evaluator=" + evaluator.get(new ThrowingDriverContext()) + "]"; + return "EvalOperator[evaluator=" + evaluator + "]"; } } @@ -65,7 +64,7 @@ public void close() { public interface ExpressionEvaluator extends Releasable { /** A Factory for creating ExpressionEvaluators. */ interface Factory { - ExpressionEvaluator get(DriverContext driverContext); + ExpressionEvaluator get(DriverContext context); } /** @@ -74,6 +73,18 @@ interface Factory { Block.Ref eval(Page page); } + public static final ExpressionEvaluator.Factory CONSTANT_NULL_FACTORY = new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext driverContext) { + return CONSTANT_NULL; + } + + @Override + public String toString() { + return CONSTANT_NULL.toString(); + } + }; + public static final ExpressionEvaluator CONSTANT_NULL = new ExpressionEvaluator() { @Override public Block.Ref eval(Page page) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java index 517743357a440..ea4f9dc1e05a6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/MultivalueDedupe.java @@ -18,6 +18,8 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import java.util.function.BiFunction; + /** * Utilities to remove duplicates from multivalued fields. */ @@ -77,44 +79,29 @@ public static Block.Ref dedupeToBlockUsingCopyAndSort(Block.Ref ref, BlockFactor * Build an {@link EvalOperator.ExpressionEvaluator} that deduplicates values * using an adaptive algorithm based on the size of the input list.
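The describe() change above relies on evaluator factories carrying a meaningful toString(), so a description can be rendered without instantiating an evaluator against a live DriverContext. A sketch of that pattern under assumed names (Evaluator and EvaluatorFactory are illustrative, not the real ESQL interfaces):

```java
// Sketch: a shared constant evaluator plus a factory whose toString()
// delegates to it, so "EvalOperator[evaluator=" + factory + "]" is already
// descriptive. Names here are illustrative, not the ESQL classes.
interface Evaluator {
    Object eval(Object page);
}

interface EvaluatorFactory {
    Evaluator get(Object driverContext);
}

final class ConstantNullSketch {
    static final Evaluator CONSTANT_NULL = new Evaluator() {
        @Override
        public Object eval(Object page) {
            return null; // the real operator returns a block of nulls
        }

        @Override
        public String toString() {
            return "ConstantNull[]";
        }
    };

    static final EvaluatorFactory CONSTANT_NULL_FACTORY = new EvaluatorFactory() {
        @Override
        public Evaluator get(Object driverContext) {
            return CONSTANT_NULL; // stateless, so one shared instance suffices
        }

        @Override
        public String toString() {
            return CONSTANT_NULL.toString();
        }
    };

    private ConstantNullSketch() {}
}
```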
*/ - public static ExpressionEvaluator.Factory evaluator(ElementType elementType, ExpressionEvaluator.Factory nextSupplier) { + public static ExpressionEvaluator.Factory evaluator(ElementType elementType, ExpressionEvaluator.Factory field) { return switch (elementType) { - case BOOLEAN -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return new MultivalueDedupeBoolean(field.eval(page)).dedupeToBlock(dvrCtx.blockFactory()); - } - }; - case BYTES_REF -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return new MultivalueDedupeBytesRef(field.eval(page)).dedupeToBlockAdaptive(dvrCtx.blockFactory()); - } - }; - case INT -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return new MultivalueDedupeInt(field.eval(page)).dedupeToBlockAdaptive(dvrCtx.blockFactory()); - } - }; - case LONG -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return new MultivalueDedupeLong(field.eval(page)).dedupeToBlockAdaptive(dvrCtx.blockFactory()); - } - }; - case DOUBLE -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return new MultivalueDedupeDouble(field.eval(page)).dedupeToBlockAdaptive(dvrCtx.blockFactory()); - } - }; - case NULL -> dvrCtx -> new MvDedupeEvaluator(nextSupplier.get(dvrCtx)) { - @Override - public Block.Ref eval(Page page) { - return field.eval(page); // The page is all nulls and when you dedupe that it's still all nulls - } - }; + case BOOLEAN -> new EvaluatorFactory( + field, + (blockFactory, ref) -> new MultivalueDedupeBoolean(ref).dedupeToBlock(blockFactory) + ); + case BYTES_REF -> new EvaluatorFactory( + field, + (blockFactory, ref) -> new MultivalueDedupeBytesRef(ref).dedupeToBlockAdaptive(blockFactory) + ); + case INT -> new EvaluatorFactory( + field, + (blockFactory, ref) -> new MultivalueDedupeInt(ref).dedupeToBlockAdaptive(blockFactory) + ); + case LONG -> new EvaluatorFactory( + field, + (blockFactory, ref) -> new MultivalueDedupeLong(ref).dedupeToBlockAdaptive(blockFactory) + ); + case DOUBLE -> new EvaluatorFactory( + field, + (blockFactory, ref) -> new MultivalueDedupeDouble(ref).dedupeToBlockAdaptive(blockFactory) + ); + case NULL -> field; // The page is all nulls and when you dedupe that it's still all nulls default -> throw new IllegalArgumentException("unsupported type [" + elementType + "]"); }; } @@ -156,11 +143,34 @@ public static BatchEncoder batchEncoder(Block.Ref ref, int batchSize, boolean al } } - private abstract static class MvDedupeEvaluator implements EvalOperator.ExpressionEvaluator { - protected final EvalOperator.ExpressionEvaluator field; + private record EvaluatorFactory(ExpressionEvaluator.Factory field, BiFunction<BlockFactory, Block.Ref, Block.Ref> dedupe) + implements + ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Evaluator(context.blockFactory(), field.get(context), dedupe); + } - private MvDedupeEvaluator(EvalOperator.ExpressionEvaluator field) { + @Override + public String toString() { + return "MvDedupe[field=" + field + "]"; + } + } + + private static class Evaluator implements ExpressionEvaluator { + private final BlockFactory blockFactory; + private final ExpressionEvaluator field; + private final BiFunction<BlockFactory, Block.Ref, Block.Ref> dedupe; + + protected Evaluator(BlockFactory blockFactory, ExpressionEvaluator field,
BiFunction<BlockFactory, Block.Ref, Block.Ref> dedupe) { + this.blockFactory = blockFactory; this.field = field; + this.dedupe = dedupe; + } + + @Override + public Block.Ref eval(Page page) { + return dedupe.apply(blockFactory, field.eval(page)); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 216627f996cad..07494f97cfd6d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -7,11 +7,11 @@ package org.elasticsearch.compute.operator; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BitArray; import org.elasticsearch.compute.Describable; @@ -24,15 +24,15 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; +import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.ValueSourceInfo; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.HashAggregationOperator.GroupSpec; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.index.mapper.BlockDocValuesReader; import java.io.IOException; import java.io.UncheckedIOException; @@ -52,7 +52,8 @@ */ public class OrdinalsGroupingOperator implements Operator { public record OrdinalsGroupingOperatorFactory( - Supplier<List<ValueSourceInfo>> sources, + List<BlockDocValuesReader.Factory> readerFactories, + ElementType groupingElementType, int docChannel, String groupingField, List<GroupingAggregator.Factory> aggregators, @@ -63,7 +64,8 @@ public record OrdinalsGroupingOperatorFactory( @Override public Operator get(DriverContext driverContext) { return new OrdinalsGroupingOperator( - sources.get(), + readerFactories, + groupingElementType, docChannel, groupingField, aggregators, @@ -79,11 +81,12 @@ public String describe() { } } - private final List<ValueSourceInfo> sources; + private final List<BlockDocValuesReader.Factory> readerFactories; private final int docChannel; private final String groupingField; private final List<GroupingAggregator.Factory> aggregatorFactories; + private final ElementType groupingElementType; private final Map<SegmentID, OrdinalSegmentAggregator> ordinalAggregators; private final BigArrays bigArrays; @@ -96,7 +99,8 @@ public String describe() { private ValuesAggregator valuesAggregator; public OrdinalsGroupingOperator( - List<ValueSourceInfo> sources, + List<BlockDocValuesReader.Factory> readerFactories, + ElementType groupingElementType, int docChannel, String groupingField, List<GroupingAggregator.Factory> aggregatorFactories, @@ -105,7 +109,8 @@ public OrdinalsGroupingOperator( DriverContext driverContext ) { Objects.requireNonNull(aggregatorFactories); - this.sources = sources; + this.readerFactories = readerFactories; + this.groupingElementType = groupingElementType; this.docChannel = docChannel; this.groupingField = groupingField; this.aggregatorFactories = aggregatorFactories; @@ -126,22
+131,20 @@ public void addInput(Page page) { requireNonNull(page, "page is null"); DocVector docVector = page.<DocBlock>getBlock(docChannel).asVector(); final int shardIndex = docVector.shards().getInt(0); - final var source = sources.get(shardIndex); + final var readerFactory = readerFactories.get(shardIndex); boolean pagePassed = false; try { - if (docVector.singleSegmentNonDecreasing() && source.source() instanceof ValuesSource.Bytes.WithOrdinals withOrdinals) { + if (docVector.singleSegmentNonDecreasing() && readerFactory.supportsOrdinals()) { final IntVector segmentIndexVector = docVector.segments(); assert segmentIndexVector.isConstant(); final OrdinalSegmentAggregator ordinalAggregator = this.ordinalAggregators.computeIfAbsent( new SegmentID(shardIndex, segmentIndexVector.getInt(0)), k -> { try { - final LeafReaderContext leafReaderContext = source.reader().leaves().get(k.segmentIndex); return new OrdinalSegmentAggregator( driverContext.blockFactory(), this::createGroupingAggregators, - withOrdinals, - leafReaderContext, + () -> readerFactory.ordinals(k.segmentIndex), bigArrays ); } catch (IOException e) { @@ -155,7 +158,8 @@ public void addInput(Page page) { if (valuesAggregator == null) { int channelIndex = page.getBlockCount(); // extractor will append a new block at the end valuesAggregator = new ValuesAggregator( - sources, + readerFactories, + groupingElementType, docChannel, groupingField, channelIndex, @@ -327,29 +331,26 @@ record SegmentID(int shardIndex, int segmentIndex) { static final class OrdinalSegmentAggregator implements Releasable, SeenGroupIds { private final BlockFactory blockFactory; private final List<GroupingAggregator> aggregators; - private final ValuesSource.Bytes.WithOrdinals withOrdinals; - private final LeafReaderContext leafReaderContext; + private final CheckedSupplier<SortedSetDocValues, IOException> docValuesSupplier; private final BitArray visitedOrds; private BlockOrdinalsReader currentReader; OrdinalSegmentAggregator( BlockFactory blockFactory, Supplier<List<GroupingAggregator>> aggregatorsSupplier, - ValuesSource.Bytes.WithOrdinals withOrdinals, - LeafReaderContext leafReaderContext, + CheckedSupplier<SortedSetDocValues, IOException> docValuesSupplier, BigArrays bigArrays ) throws IOException { boolean success = false; List<GroupingAggregator> groupingAggregators = null; BitArray bitArray = null; try { - final SortedSetDocValues sortedSetDocValues = withOrdinals.ordinalsValues(leafReaderContext); + final SortedSetDocValues sortedSetDocValues = docValuesSupplier.get(); bitArray = new BitArray(sortedSetDocValues.getValueCount(), bigArrays); groupingAggregators = aggregatorsSupplier.get(); this.currentReader = new BlockOrdinalsReader(sortedSetDocValues, blockFactory); this.blockFactory = blockFactory; - this.withOrdinals = withOrdinals; - this.leafReaderContext = leafReaderContext; + this.docValuesSupplier = docValuesSupplier; this.aggregators = groupingAggregators; this.visitedOrds = bitArray; success = true; @@ -369,7 +370,7 @@ void addInput(IntVector docs, Page page) { } if (BlockOrdinalsReader.canReuse(currentReader, docs.getInt(0)) == false) { - currentReader = new BlockOrdinalsReader(withOrdinals.ordinalsValues(leafReaderContext), blockFactory); + currentReader = new BlockOrdinalsReader(docValuesSupplier.get(), blockFactory); } try (IntBlock ordinals = currentReader.readOrdinalsAdded1(docs)) { for (int p = 0; p < ordinals.getPositionCount(); p++) { @@ -392,7 +393,7 @@ void addInput(IntVector docs, Page page) { } AggregatedResultIterator getResultIterator() throws IOException { - return new AggregatedResultIterator(aggregators, visitedOrds,
withOrdinals.ordinalsValues(leafReaderContext)); + return new AggregatedResultIterator(aggregators, visitedOrds, docValuesSupplier.get()); } boolean seenNulls() { @@ -457,7 +458,8 @@ private static class ValuesAggregator implements Releasable { private final HashAggregationOperator aggregator; ValuesAggregator( - List<ValueSourceInfo> sources, + List<BlockDocValuesReader.Factory> factories, + ElementType groupingElementType, int docChannel, String groupingField, int channelIndex, @@ -465,15 +467,10 @@ private static class ValuesAggregator implements Releasable { int maxPageSize, DriverContext driverContext ) { - this.extractor = new ValuesSourceReaderOperator(sources, docChannel, groupingField); + this.extractor = new ValuesSourceReaderOperator(BlockFactory.getNonBreakingInstance(), factories, docChannel, groupingField); this.aggregator = new HashAggregationOperator( aggregatorFactories, - () -> BlockHash.build( - List.of(new GroupSpec(channelIndex, sources.get(0).elementType())), - driverContext, - maxPageSize, - false - ), + () -> BlockHash.build(List.of(new GroupSpec(channelIndex, groupingElementType)), driverContext, maxPageSize, false), driverContext ); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index 0b2cb20ecdabd..979777d4cfd03 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -154,7 +154,7 @@ void fetchPage() { final LoopControl loopControl = new LoopControl(); while (loopControl.isRunning()) { loopControl.exiting(); - // finish other sinks if one of them failed or sources no longer need pages. + // finish other sinks if one of them failed or the source no longer needs pages. boolean toFinishSinks = buffer.noMoreInputs() || failure.get() != null; remoteSink.fetchPageAsync(toFinishSinks, ActionListener.wrap(resp -> { Page page = resp.takePage(); @@ -249,7 +249,7 @@ protected void closeInternal() { /** * Add a listener, which will be notified when this exchange source handler is completed. An exchange source - * handler is consider completed when all exchange sources and sinks are completed and de-attached. + * handler is considered completed when all exchange factories and sinks are completed and detached.
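OrdinalSegmentAggregator above swaps a captured (ValuesSource, LeafReaderContext) pair for a supplier it can re-invoke whenever the forward-only doc-values iterator must be restarted. A JDK-only sketch of that refactoring; CheckedSupplier here mirrors the imported org.elasticsearch.common.CheckedSupplier, and DocValuesStandIn is hypothetical:

```java
import java.io.IOException;

// Sketch of replacing captured (source, leafContext) state with a re-invocable
// supplier, as OrdinalSegmentAggregator does above. DocValuesStandIn is hypothetical.
interface CheckedSupplier<T, E extends Exception> {
    T get() throws E;
}

class SegmentAggregatorSketch {
    static class DocValuesStandIn {
        int position = -1; // forward-only iterator position
    }

    private final CheckedSupplier<DocValuesStandIn, IOException> docValuesSupplier;
    private DocValuesStandIn current;

    SegmentAggregatorSketch(CheckedSupplier<DocValuesStandIn, IOException> docValuesSupplier) throws IOException {
        this.docValuesSupplier = docValuesSupplier;
        this.current = docValuesSupplier.get(); // eager first read, as in the constructor above
    }

    void addInput(int firstDoc) throws IOException {
        // Doc-values iterators only advance; if a batch starts behind the
        // current position, fetch a fresh iterator from the supplier.
        if (firstDoc <= current.position) {
            current = docValuesSupplier.get();
        }
        current.position = firstDoc;
        // ... collect ordinals from `current` ...
    }
}
```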
*/ public void addCompletionListener(ActionListener listener) { completionFuture.addListener(listener); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index 7b29f82f085c5..b45f597553e1b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -14,7 +14,6 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; @@ -46,10 +45,10 @@ import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.DataPartitioning; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperator; -import org.elasticsearch.compute.lucene.ValueSourceInfo; import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -62,13 +61,10 @@ import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; @@ -80,7 +76,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.function.LongUnaryOperator; import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL; import static org.elasticsearch.compute.aggregation.AggregatorMode.INITIAL; @@ -177,19 +172,23 @@ protected Page process(Page page) { int positionCount = docVector.getPositionCount(); IntVector shards = docVector.shards(); if (randomBoolean()) { - IntVector.Builder builder = IntVector.newVectorBuilder(positionCount); - for (int i = 0; i < positionCount; i++) { - builder.appendInt(shards.getInt(i)); + try (IntVector.Builder builder = IntVector.newVectorBuilder(positionCount)) { + for (int i = 0; i < positionCount; i++) { + builder.appendInt(shards.getInt(i)); + } + shards.close(); + shards = builder.build(); } - shards = builder.build(); } IntVector segments = docVector.segments(); if (randomBoolean()) { - IntVector.Builder builder = IntVector.newVectorBuilder(positionCount); - for (int i = 0; i < positionCount; i++) { - builder.appendInt(segments.getInt(i)); + try (IntVector.Builder builder = IntVector.newVectorBuilder(positionCount)) { + for (int i = 0; i < positionCount; i++) { + builder.appendInt(segments.getInt(i)); + } + segments.close(); + segments = builder.build(); } - segments = builder.build(); } IntVector 
docs = docVector.docs(); if (randomBoolean()) { @@ -198,6 +197,7 @@ protected Page process(Page page) { ids.add(docs.getInt(i)); } Collections.shuffle(ids, random()); + docs.close(); docs = blockFactory.newIntArrayVector(ids.stream().mapToInt(n -> n).toArray(), positionCount); } Block[] blocks = new Block[page.getBlockCount()]; @@ -231,13 +231,9 @@ public String toString() { }, new OrdinalsGroupingOperator( List.of( - new ValueSourceInfo( - CoreValuesSourceType.KEYWORD, - randomBoolean() ? getOrdinalsValuesSource(gField) : getBytesValuesSource(gField), - ElementType.BYTES_REF, - reader - ) + BlockReaderFactories.loaderToFactory(reader, new KeywordFieldMapper.KeywordFieldType("g").blockLoader(null)) ), + ElementType.BYTES_REF, 0, gField, List.of(CountAggregatorFunction.supplier(bigArrays, List.of(1)).groupingAggregatorFactory(INITIAL)), @@ -333,61 +329,6 @@ public ScoreMode scoreMode() { return docIds; } - static ValuesSource.Bytes.WithOrdinals getOrdinalsValuesSource(String field) { - return new ValuesSource.Bytes.WithOrdinals() { - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return getBytesValuesSource(field).bytesValues(context); - } - - @Override - public SortedSetDocValues ordinalsValues(LeafReaderContext context) throws IOException { - return context.reader().getSortedSetDocValues(field); - } - - @Override - public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) { - throw new UnsupportedOperationException(); - } - - @Override - public boolean supportsGlobalOrdinalsMapping() { - return false; - } - - @Override - public LongUnaryOperator globalOrdinalsMapping(LeafReaderContext context) { - throw new UnsupportedOperationException(); - } - }; - } - - static ValuesSource.Bytes getBytesValuesSource(String field) { - return new ValuesSource.Bytes() { - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - final SortedSetDocValues dv = context.reader().getSortedSetDocValues(field); - return new SortedBinaryDocValues() { - @Override - public boolean advanceExact(int doc) throws IOException { - return dv.advanceExact(doc); - } - - @Override - public int docValueCount() { - return dv.docValueCount(); - } - - @Override - public BytesRef nextValue() throws IOException { - return dv.lookupOrd(dv.nextOrd()); - } - }; - } - }; - } - /** * Creates a {@link BigArrays} that tracks releases but doesn't throw circuit breaking exceptions. 
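The reshuffled test above also tightens block accounting: each copy is built inside try-with-resources and the original vector is closed before the copy is adopted. A condensed sketch of that hand-off, with Vec and VecBuilder as assumed stand-ins for IntVector and its builder:

```java
import java.util.function.IntFunction;

// Sketch of the copy-then-close ownership hand-off from the test above.
// Vec and VecBuilder stand in for IntVector and IntVector.Builder.
class CopyThenCloseSketch {
    interface Vec extends AutoCloseable {
        int get(int i);

        int positionCount();

        @Override
        void close(); // releases the vector's memory accounting
    }

    interface VecBuilder extends AutoCloseable {
        void append(int v);

        Vec build();

        @Override
        void close(); // a no-op once build() has transferred ownership
    }

    static Vec copy(Vec original, IntFunction<VecBuilder> newBuilder) {
        try (VecBuilder builder = newBuilder.apply(original.positionCount())) {
            for (int i = 0; i < original.positionCount(); i++) {
                builder.append(original.get(i));
            }
            original.close();       // release the source before adopting the copy
            return builder.build(); // the caller now owns the freshly built vector
        }
    }
}
```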
*/ diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index 83f4a6895b154..894b94476c08d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -29,7 +29,7 @@ import org.elasticsearch.compute.operator.NullInsertingSourceOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.PositionMergingSourceOperator; -import org.elasticsearch.compute.operator.ResultPageSinkOperator; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import java.util.ArrayList; import java.util.List; @@ -43,12 +43,6 @@ import static org.hamcrest.Matchers.hasSize; public abstract class AggregatorFunctionTestCase extends ForkingOperatorTestCase { - - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } - protected abstract AggregatorFunctionSupplier aggregatorFunction(BigArrays bigArrays, List inputChannels); protected final int aggregatorIntermediateBlockCount() { @@ -110,7 +104,7 @@ public final void testIgnoresNulls() { driverContext, new NullInsertingSourceOperator(new CannedSourceOperator(input.iterator()), blockFactory), List.of(simple(nonBreakingBigArrays().withCircuitBreaking()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index ba930e943e79a..753b5878de2ae 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -48,12 +48,6 @@ import static org.hamcrest.Matchers.hasSize; public abstract class GroupingAggregatorFunctionTestCase extends ForkingOperatorTestCase { - - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } - protected abstract AggregatorFunctionSupplier aggregatorFunction(BigArrays bigArrays, List inputChannels); protected final int aggregatorIntermediateBlockCount() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index b08bda4a71d38..ea428d7d87cad 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -14,9 +14,9 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.ResultPageSinkOperator; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; 
+import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -57,7 +57,7 @@ public void testOverflowSucceeds() { driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(Double.MAX_VALUE - 1, 2)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -78,7 +78,7 @@ public void testSummationAccuracy() { DoubleStream.of(0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7) ), List.of(simple(nonBreakingBigArrays()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -104,7 +104,7 @@ public void testSummationAccuracy() { driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(values)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -126,7 +126,7 @@ public void testSummationAccuracy() { driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -145,7 +145,7 @@ public void testSummationAccuracy() { driverContext, new SequenceDoubleBlockSourceOperator(driverContext.blockFactory(), DoubleStream.of(largeValues)), List.of(simple(nonBreakingBigArrays()).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 4cd2c717eb57a..5dfba49b404e8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -11,22 +11,24 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.MultivalueDedupeTests; import org.elasticsearch.core.Releasables; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ListMatcher; -import org.junit.After; import java.util.ArrayList; 
import java.util.Comparator; @@ -45,11 +47,6 @@ //@TestLogging(value = "org.elasticsearch.compute:TRACE", reason = "debug") public class BlockHashRandomizedTests extends ESTestCase { - - final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); - final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - final MockBlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); - @ParametersFactory public static List params() { List params = new ArrayList<>(); @@ -101,19 +98,33 @@ public BlockHashRandomizedTests( this.allowedTypes = allowedTypes; } - @After - public void checkBreaker() { - assertThat(breaker.getUsed(), is(0L)); + public void test() { + CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); + test(new MockBlockFactory(breaker, bigArrays)); } - public void test() { + public void testWithCranky() { + CircuitBreakerService service = new CrankyCircuitBreakerService(); + CircuitBreaker breaker = service.getBreaker(CircuitBreaker.REQUEST); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, service); + try { + test(new MockBlockFactory(breaker, bigArrays)); + logger.info("cranky let us finish!"); + } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void test(MockBlockFactory blockFactory) { List types = randomList(groups, groups, () -> randomFrom(allowedTypes)); BasicBlockTests.RandomBlock[] randomBlocks = new BasicBlockTests.RandomBlock[types.size()]; Block[] blocks = new Block[types.size()]; int pageCount = between(1, 10); int positionCount = 100; int emitBatchSize = 100; - try (BlockHash blockHash = newBlockHash(emitBatchSize, types)) { + try (BlockHash blockHash = newBlockHash(blockFactory, emitBatchSize, types)) { /* * Only the long/long, long/bytes_ref, and bytes_ref/long implementations don't collect nulls. */ @@ -146,7 +157,6 @@ public void test() { assertThat(ordsAndKeys.ords().getTotalValueCount(), lessThanOrEqualTo(emitBatchSize)); } batchCount[0]++; - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty().asBlock()); }, blocks); if (usingSingle) { assertThat(batchCount[0], equalTo(1)); @@ -184,14 +194,15 @@ public void test() { blockFactory.ensureAllBlocksAreReleased(); } } + assertThat(blockFactory.breaker().getUsed(), is(0L)); } - private BlockHash newBlockHash(int emitBatchSize, List types) { + private BlockHash newBlockHash(BlockFactory blockFactory, int emitBatchSize, List types) { List specs = new ArrayList<>(types.size()); for (int c = 0; c < types.size(); c++) { specs.add(new HashAggregationOperator.GroupSpec(c, types.get(c))); } - DriverContext driverContext = new DriverContext(bigArrays, blockFactory); + DriverContext driverContext = new DriverContext(blockFactory.bigArrays(), blockFactory); return forcePackedHash ? 
new PackedValuesBlockHash(specs, driverContext, emitBatchSize) : BlockHash.build(specs, driverContext, emitBatchSize, true); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index 86c7f64dc3816..567f58d0dee75 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -7,11 +7,11 @@ package org.elasticsearch.compute.aggregation.blockhash; +import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.inject.name.Named; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; @@ -25,6 +25,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; @@ -55,7 +56,7 @@ public class BlockHashTests extends ESTestCase { final CircuitBreaker breaker = new MockBigArrays.LimitedBreaker("esql-test-breaker", ByteSizeValue.ofGb(1)); final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, mockBreakerService(breaker)); - final BlockFactory blockFactory = BlockFactory.getInstance(breaker, bigArrays); + final MockBlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); @ParametersFactory public static List params() { @@ -67,603 +68,565 @@ public static List params() { @After public void checkBreaker() { + blockFactory.ensureAllBlocksAreReleased(); assertThat(breaker.getUsed(), is(0L)); } private final boolean forcePackedHash; - public BlockHashTests(@Named("forcePackedHash") boolean forcePackedHash) { + public BlockHashTests(@Name("forcePackedHash") boolean forcePackedHash) { this.forcePackedHash = forcePackedHash; } public void testIntHash() { int[] values = new int[] { 1, 2, 3, 1, 2, 3, 1, 2, 3 }; - IntBlock block = BlockFactory.getNonBreakingInstance().newIntArrayVector(values, values.length).asBlock(); - - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 1, 2, 0, 1, 2); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - } else { - assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 2, 3, 1, 2, 3); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 4))); - } - assertKeys(ordsAndKeys.keys, 1, 2, 3); - // we close these explicitly in the test. In common operation the operator is in charge of closing these. 
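// A minimal sketch of the "cranky" pattern the randomized test above adopts: rerun the same
// body against CrankyCircuitBreakerService, which fails allocations at random, and accept
// either a clean pass or the cranky breaker's own CircuitBreakingException. `runTestBody` is
// a hypothetical stand-in for the private test(MockBlockFactory) delegate shown in the diff.
public void testWithCrankySketch() {
    CircuitBreakerService service = new CrankyCircuitBreakerService();
    CircuitBreaker breaker = service.getBreaker(CircuitBreaker.REQUEST);
    BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, service);
    try {
        runTestBody(new MockBlockFactory(breaker, bigArrays));
    } catch (CircuitBreakingException e) {
        // the only acceptable failure is the one cranky injected itself
        assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
    }
}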
- Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 1, 2, 0, 1, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + } else { + assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 2, 3, 1, 2, 3); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 4))); + } + assertKeys(ordsAndKeys.keys, 1, 2, 3); + }, blockFactory.newIntArrayVector(values, values.length).asBlock()); } public void testIntHashWithNulls() { - IntBlock.Builder builder = BlockFactory.getNonBreakingInstance().newIntBlockBuilder(4); - builder.appendInt(0); - builder.appendNull(); - builder.appendInt(2); - builder.appendNull(); - - OrdsAndKeys ordsAndKeys = hash(builder.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); - assertKeys(ordsAndKeys.keys, 0, null, 2); - } else { - assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=2, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); - assertKeys(ordsAndKeys.keys, null, 0, 2); + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(4)) { + builder.appendInt(0); + builder.appendNull(); + builder.appendInt(2); + builder.appendNull(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=3, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, 0, null, 2); + } else { + assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=2, seenNull=true}")); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, 0, 2); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testIntHashWithMultiValuedFields() { - var builder = BlockFactory.getNonBreakingInstance().newIntBlockBuilder(8); - builder.appendInt(1); - builder.beginPositionEntry(); - builder.appendInt(1); - builder.appendInt(2); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendInt(3); - builder.appendInt(1); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendInt(3); - builder.appendInt(3); - builder.endPositionEntry(); - builder.appendNull(); - builder.beginPositionEntry(); - builder.appendInt(3); - builder.appendInt(2); - builder.appendInt(1); - builder.endPositionEntry(); - - OrdsAndKeys ordsAndKeys = hash(builder.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=4, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0 }, - new int[] { 0, 1 }, - new int[] { 2, 0 }, - new int[] { 2 }, - new int[] { 3 }, - new int[] { 2, 1, 0 } - ); - assertKeys(ordsAndKeys.keys, 1, 2, 3, null); - } else { - assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=true}")); - 
assertOrds( - ordsAndKeys.ords, - new int[] { 1 }, - new int[] { 1, 2 }, - new int[] { 3, 1 }, - new int[] { 3 }, - new int[] { 0 }, - new int[] { 3, 2, 1 } - ); - assertKeys(ordsAndKeys.keys, null, 1, 2, 3); + try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(8)) { + builder.appendInt(1); + builder.beginPositionEntry(); + builder.appendInt(1); + builder.appendInt(2); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendInt(3); + builder.appendInt(1); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendInt(3); + builder.appendInt(3); + builder.endPositionEntry(); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendInt(3); + builder.appendInt(2); + builder.appendInt(1); + builder.endPositionEntry(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT], entries=4, size=")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 2, 0 }, + new int[] { 2 }, + new int[] { 3 }, + new int[] { 2, 1, 0 } + ); + assertKeys(ordsAndKeys.keys, 1, 2, 3, null); + } else { + assertThat(ordsAndKeys.description, equalTo("IntBlockHash{channel=0, entries=3, seenNull=true}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 3, 1 }, + new int[] { 3 }, + new int[] { 0 }, + new int[] { 3, 2, 1 } + ); + assertKeys(ordsAndKeys.keys, null, 1, 2, 3); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testLongHash() { long[] values = new long[] { 2, 1, 4, 2, 4, 1, 3, 4 }; - LongBlock block = BlockFactory.getNonBreakingInstance().newLongArrayVector(values, values.length).asBlock(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - } else { - assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=4, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); - } - assertKeys(ordsAndKeys.keys, 2L, 1L, 4L, 3L); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + } else { + assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=4, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); + } + assertKeys(ordsAndKeys.keys, 2L, 1L, 4L, 3L); + }, blockFactory.newLongArrayVector(values, values.length).asBlock()); } public void testLongHashWithNulls() { - LongBlock.Builder builder = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(4); - builder.appendLong(0); - builder.appendNull(); - builder.appendLong(2); 
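// Illustrative reconstruction (not the PR's exact helper) of the callback-style hash(...)
// overload these tests migrate to: build the blocks, hand OrdsAndKeys to the assertion
// callback, then release keys, nonEmpty, and the input blocks centrally instead of in every
// test body. Field access on OrdsAndKeys follows the usage visible in this diff.
private void hashSketch(Consumer<OrdsAndKeys> callback, Block.Builder... builders) {
    Block[] blocks = new Block[builders.length];
    for (int i = 0; i < builders.length; i++) {
        blocks[i] = builders[i].build();
    }
    try {
        OrdsAndKeys ordsAndKeys = hash(blocks); // pre-existing varargs entry point in these tests
        callback.accept(ordsAndKeys);
        Releasables.closeExpectNoException(ordsAndKeys.keys);
        Releasables.closeExpectNoException(ordsAndKeys.nonEmpty);
    } finally {
        Releasables.closeExpectNoException(blocks);
    }
}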
- builder.appendNull(); - - OrdsAndKeys ordsAndKeys = hash(builder.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); - assertKeys(ordsAndKeys.keys, 0L, null, 2L); - } else { - assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=2, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); - assertKeys(ordsAndKeys.keys, null, 0L, 2L); + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(4)) { + builder.appendLong(0); + builder.appendNull(); + builder.appendLong(2); + builder.appendNull(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=3, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, 0L, null, 2L); + } else { + assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=2, seenNull=true}")); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, 0L, 2L); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testLongHashWithMultiValuedFields() { - var builder = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(8); - builder.appendLong(1); - builder.beginPositionEntry(); - builder.appendLong(1); - builder.appendLong(2); - builder.appendLong(3); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendLong(1); - builder.appendLong(1); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendLong(3); - builder.endPositionEntry(); - builder.appendNull(); - builder.beginPositionEntry(); - builder.appendLong(3); - builder.appendLong(2); - builder.appendLong(1); - builder.endPositionEntry(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0 }, - new int[] { 0, 1, 2 }, - new int[] { 0 }, - new int[] { 2 }, - new int[] { 3 }, - new int[] { 2, 1, 0 } - ); - assertKeys(ordsAndKeys.keys, 1L, 2L, 3L, null); - } else { - assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=3, seenNull=true}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 1 }, - new int[] { 1, 2, 3 }, - new int[] { 1 }, - new int[] { 3 }, - new int[] { 0 }, - new int[] { 3, 2, 1 } - ); - assertKeys(ordsAndKeys.keys, null, 1L, 2L, 3L); + try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(8)) { + builder.appendLong(1); + builder.beginPositionEntry(); + builder.appendLong(1); + builder.appendLong(2); + builder.appendLong(3); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendLong(1); + builder.appendLong(1); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendLong(3); + builder.endPositionEntry(); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendLong(3); + builder.appendLong(2); + builder.appendLong(1); + builder.endPositionEntry(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, 
startsWith("PackedValuesBlockHash{groups=[0:LONG], entries=4, size=")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 0, 1, 2 }, + new int[] { 0 }, + new int[] { 2 }, + new int[] { 3 }, + new int[] { 2, 1, 0 } + ); + assertKeys(ordsAndKeys.keys, 1L, 2L, 3L, null); + } else { + assertThat(ordsAndKeys.description, equalTo("LongBlockHash{channel=0, entries=3, seenNull=true}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 1, 2, 3 }, + new int[] { 1 }, + new int[] { 3 }, + new int[] { 0 }, + new int[] { 3, 2, 1 } + ); + assertKeys(ordsAndKeys.keys, null, 1L, 2L, 3L); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testDoubleHash() { double[] values = new double[] { 2.0, 1.0, 4.0, 2.0, 4.0, 1.0, 3.0, 4.0 }; - DoubleBlock block = BlockFactory.getNonBreakingInstance().newDoubleArrayVector(values, values.length).asBlock(); - OrdsAndKeys ordsAndKeys = hash(block); - - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - } else { - assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=4, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); - } - assertKeys(ordsAndKeys.keys, 2.0, 1.0, 4.0, 3.0); - // we close these explicitly in the test. In common operation the operator is in charge of closing these. 
- Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + } else { + assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=4, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); + } + assertKeys(ordsAndKeys.keys, 2.0, 1.0, 4.0, 3.0); + }, blockFactory.newDoubleArrayVector(values, values.length).asBlock()); } public void testDoubleHashWithNulls() { - DoubleBlock.Builder builder = BlockFactory.getNonBreakingInstance().newDoubleBlockBuilder(4); - builder.appendDouble(0); - builder.appendNull(); - builder.appendDouble(2); - builder.appendNull(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); - assertKeys(ordsAndKeys.keys, 0.0, null, 2.0); - } else { - assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=2, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); - assertKeys(ordsAndKeys.keys, null, 0.0, 2.0); + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(4)) { + builder.appendDouble(0); + builder.appendNull(); + builder.appendDouble(2); + builder.appendNull(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=3, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, 0.0, null, 2.0); + } else { + assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=2, seenNull=true}")); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, 0.0, 2.0); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testDoubleHashWithMultiValuedFields() { - var builder = BlockFactory.getNonBreakingInstance().newDoubleBlockBuilder(8); - builder.appendDouble(1); - builder.beginPositionEntry(); - builder.appendDouble(2); - builder.appendDouble(3); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendDouble(3); - builder.appendDouble(2); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendDouble(1); - builder.endPositionEntry(); - builder.appendNull(); - builder.beginPositionEntry(); - builder.appendDouble(1); - builder.appendDouble(1); - builder.appendDouble(2); - builder.endPositionEntry(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0 }, - new int[] { 1, 2 }, - new int[] { 2, 1 }, - new int[] { 0 }, - new int[] { 3 }, - new int[] { 0, 1 } - ); - assertKeys(ordsAndKeys.keys, 1.0, 
2.0, 3.0, null); - } else { - assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=3, seenNull=true}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 1 }, - new int[] { 2, 3 }, - new int[] { 3, 2 }, - new int[] { 1 }, - new int[] { 0 }, - new int[] { 1, 2 } - ); - assertKeys(ordsAndKeys.keys, null, 1.0, 2.0, 3.0); + try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(8)) { + builder.appendDouble(1); + builder.beginPositionEntry(); + builder.appendDouble(2); + builder.appendDouble(3); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendDouble(3); + builder.appendDouble(2); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendDouble(1); + builder.endPositionEntry(); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendDouble(1); + builder.appendDouble(1); + builder.appendDouble(2); + builder.endPositionEntry(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:DOUBLE], entries=4, size=")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 1, 2 }, + new int[] { 2, 1 }, + new int[] { 0 }, + new int[] { 3 }, + new int[] { 0, 1 } + ); + assertKeys(ordsAndKeys.keys, 1.0, 2.0, 3.0, null); + } else { + assertThat(ordsAndKeys.description, equalTo("DoubleBlockHash{channel=0, entries=3, seenNull=true}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 2, 3 }, + new int[] { 3, 2 }, + new int[] { 1 }, + new int[] { 0 }, + new int[] { 1, 2 } + ); + assertKeys(ordsAndKeys.keys, null, 1.0, 2.0, 3.0); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBasicBytesRefHash() { - var builder = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(8); - builder.appendBytesRef(new BytesRef("item-2")); - builder.appendBytesRef(new BytesRef("item-1")); - builder.appendBytesRef(new BytesRef("item-4")); - builder.appendBytesRef(new BytesRef("item-2")); - builder.appendBytesRef(new BytesRef("item-4")); - builder.appendBytesRef(new BytesRef("item-1")); - builder.appendBytesRef(new BytesRef("item-3")); - builder.appendBytesRef(new BytesRef("item-4")); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - } else { - assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(8)) { + builder.appendBytesRef(new BytesRef("item-2")); + builder.appendBytesRef(new BytesRef("item-1")); + builder.appendBytesRef(new BytesRef("item-4")); + builder.appendBytesRef(new BytesRef("item-2")); + builder.appendBytesRef(new BytesRef("item-4")); + builder.appendBytesRef(new BytesRef("item-1")); + 
builder.appendBytesRef(new BytesRef("item-3")); + builder.appendBytesRef(new BytesRef("item-4")); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 0, 2, 1, 3, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 2, 3, 1, 3, 2, 4, 3); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 5))); + } + assertKeys(ordsAndKeys.keys, "item-2", "item-1", "item-4", "item-3"); + }, builder); } - assertKeys(ordsAndKeys.keys, "item-2", "item-1", "item-4", "item-3"); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBytesRefHashWithNulls() { - BytesRefBlock.Builder builder = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(4); - builder.appendBytesRef(new BytesRef("cat")); - builder.appendNull(); - builder.appendBytesRef(new BytesRef("dog")); - builder.appendNull(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=3, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); - assertKeys(ordsAndKeys.keys, "cat", null, "dog"); - } else { - assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=2, size=")); - assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); - assertKeys(ordsAndKeys.keys, null, "cat", "dog"); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(4)) { + builder.appendBytesRef(new BytesRef("cat")); + builder.appendNull(); + builder.appendBytesRef(new BytesRef("dog")); + builder.appendNull(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=3, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, "cat", null, "dog"); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=2, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, "cat", "dog"); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBytesRefHashWithMultiValuedFields() { - var builder = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(8); - builder.appendBytesRef(new BytesRef("foo")); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("foo")); - builder.appendBytesRef(new BytesRef("bar")); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("bar")); - builder.appendBytesRef(new 
BytesRef("bort")); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("bort")); - builder.appendBytesRef(new BytesRef("bar")); - builder.endPositionEntry(); - builder.appendNull(); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("bort")); - builder.appendBytesRef(new BytesRef("bort")); - builder.appendBytesRef(new BytesRef("bar")); - builder.endPositionEntry(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0 }, - new int[] { 0, 1 }, - new int[] { 1, 2 }, - new int[] { 2, 1 }, - new int[] { 3 }, - new int[] { 2, 1 } - ); - assertKeys(ordsAndKeys.keys, "foo", "bar", "bort", null); - } else { - assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=3, size=")); - assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 1 }, - new int[] { 1, 2 }, - new int[] { 2, 3 }, - new int[] { 3, 2 }, - new int[] { 0 }, - new int[] { 3, 2 } - ); - assertKeys(ordsAndKeys.keys, null, "foo", "bar", "bort"); + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(8)) { + builder.appendBytesRef(new BytesRef("foo")); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("foo")); + builder.appendBytesRef(new BytesRef("bar")); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("bar")); + builder.appendBytesRef(new BytesRef("bort")); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("bort")); + builder.appendBytesRef(new BytesRef("bar")); + builder.endPositionEntry(); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("bort")); + builder.appendBytesRef(new BytesRef("bort")); + builder.appendBytesRef(new BytesRef("bar")); + builder.endPositionEntry(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BYTES_REF], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 1, 2 }, + new int[] { 2, 1 }, + new int[] { 3 }, + new int[] { 2, 1 } + ); + assertKeys(ordsAndKeys.keys, "foo", "bar", "bort", null); + } else { + assertThat(ordsAndKeys.description, startsWith("BytesRefBlockHash{channel=0, entries=3, size=")); + assertThat(ordsAndKeys.description, endsWith("b, seenNull=true}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 2, 3 }, + new int[] { 3, 2 }, + new int[] { 0 }, + new int[] { 3, 2 } + ); + assertKeys(ordsAndKeys.keys, null, "foo", "bar", "bort"); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBooleanHashFalseFirst() { boolean[] values = new boolean[] { false, true, true, true, true }; - BooleanBlock block = BlockFactory.getNonBreakingInstance().newBooleanArrayVector(values, values.length).asBlock(); - - 
OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 1, 1, 1); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 2, 2, 2, 2); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 3))); - } - assertKeys(ordsAndKeys.keys, false, true); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 1, 1, 1); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + } else { + assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 2, 2, 2, 2); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 3))); + } + assertKeys(ordsAndKeys.keys, false, true); + }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); } public void testBooleanHashTrueFirst() { boolean[] values = new boolean[] { true, false, false, true, true }; - BooleanBlock block = BlockFactory.getNonBreakingInstance().newBooleanArrayVector(values, values.length).asBlock(); - - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 1, 0, 0); - assertKeys(ordsAndKeys.keys, true, false); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 2, 1, 1, 2, 2); - assertKeys(ordsAndKeys.keys, false, true); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 3))); - } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=2, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 1, 0, 0); + assertKeys(ordsAndKeys.keys, true, false); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + } else { + assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 2, 1, 1, 2, 2); + assertKeys(ordsAndKeys.keys, false, true); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(1, 3))); + } + }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); } public void testBooleanHashTrueOnly() { boolean[] values = new boolean[] { true, true, true, true }; - BooleanBlock block = BlockFactory.getNonBreakingInstance().newBooleanArrayVector(values, values.length).asBlock(); - - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); - assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); - assertKeys(ordsAndKeys.keys, 
true); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=false, seenTrue=true, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 2, 2, 2, 2); - assertKeys(ordsAndKeys.keys, true); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(2).build())); - } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); + assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); + assertKeys(ordsAndKeys.keys, true); + assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); + } else { + assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=false, seenTrue=true, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 2, 2, 2, 2); + assertKeys(ordsAndKeys.keys, true); + assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(2).build())); + } + }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); } public void testBooleanHashFalseOnly() { boolean[] values = new boolean[] { false, false, false, false }; - BooleanBlock block = BlockFactory.getNonBreakingInstance().newBooleanArrayVector(values, values.length).asBlock(); - - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); - assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=false, seenNull=false}")); - assertOrds(ordsAndKeys.ords, 1, 1, 1, 1); - assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(1).build())); - } - assertKeys(ordsAndKeys.keys, false); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=1, size=")); + assertOrds(ordsAndKeys.ords, 0, 0, 0, 0); + assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(0).build())); + } else { + assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=false, seenNull=false}")); + assertOrds(ordsAndKeys.ords, 1, 1, 1, 1); + assertThat(ordsAndKeys.nonEmpty, equalTo(IntVector.newVectorBuilder(1).appendInt(1).build())); + } + assertKeys(ordsAndKeys.keys, false); + }, blockFactory.newBooleanArrayVector(values, values.length).asBlock()); } public void testBooleanHashWithNulls() { - BooleanBlock.Builder builder = BlockFactory.getNonBreakingInstance().newBooleanBlockBuilder(4); - builder.appendBoolean(false); - builder.appendNull(); - builder.appendBoolean(true); - builder.appendNull(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); 
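// Leak checking as wired up at the top of BlockHashTests in this diff: the MockBlockFactory
// records every block it hands out, and the limited breaker must end at zero bytes once a
// test finishes. The @After hook is effectively:
@After
public void checkBreakerSketch() {
    blockFactory.ensureAllBlocksAreReleased(); // fails if any tracked block was never closed
    assertThat(breaker.getUsed(), is(0L));     // all breaker-accounted memory was returned
}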
- assertKeys(ordsAndKeys.keys, false, null, true); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}")); - assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); - assertKeys(ordsAndKeys.keys, null, false, true); + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(4)) { + builder.appendBoolean(false); + builder.appendNull(); + builder.appendBoolean(true); + builder.appendNull(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 1); + assertKeys(ordsAndKeys.keys, false, null, true); + } else { + assertThat( + ordsAndKeys.description, + equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}") + ); + assertOrds(ordsAndKeys.ords, 1, 0, 2, 0); + assertKeys(ordsAndKeys.keys, null, false, true); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBooleanHashWithMultiValuedFields() { - var builder = BlockFactory.getNonBreakingInstance().newBooleanBlockBuilder(8); - builder.appendBoolean(false); - builder.beginPositionEntry(); - builder.appendBoolean(false); - builder.appendBoolean(true); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendBoolean(true); - builder.appendBoolean(false); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendBoolean(true); - builder.endPositionEntry(); - builder.appendNull(); - builder.beginPositionEntry(); - builder.appendBoolean(true); - builder.appendBoolean(true); - builder.appendBoolean(false); - builder.endPositionEntry(); - - Block block = builder.build(); - OrdsAndKeys ordsAndKeys = hash(block); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0 }, - new int[] { 0, 1 }, - new int[] { 0, 1 }, // Order is not preserved - new int[] { 1 }, - new int[] { 2 }, - new int[] { 0, 1 } - ); - assertKeys(ordsAndKeys.keys, false, true, null); - } else { - assertThat(ordsAndKeys.description, equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 1 }, - new int[] { 1, 2 }, - new int[] { 1, 2 }, // Order is not preserved - new int[] { 2 }, - new int[] { 0 }, - new int[] { 1, 2 } - ); - assertKeys(ordsAndKeys.keys, null, false, true); + try (BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(8)) { + builder.appendBoolean(false); + builder.beginPositionEntry(); + builder.appendBoolean(false); + builder.appendBoolean(true); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBoolean(true); + builder.appendBoolean(false); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBoolean(true); + builder.endPositionEntry(); + builder.appendNull(); + builder.beginPositionEntry(); + builder.appendBoolean(true); + builder.appendBoolean(true); + builder.appendBoolean(false); + builder.endPositionEntry(); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, 
startsWith("PackedValuesBlockHash{groups=[0:BOOLEAN], entries=3, size=")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0 }, + new int[] { 0, 1 }, + new int[] { 0, 1 }, // Order is not preserved + new int[] { 1 }, + new int[] { 2 }, + new int[] { 0, 1 } + ); + assertKeys(ordsAndKeys.keys, false, true, null); + } else { + assertThat( + ordsAndKeys.description, + equalTo("BooleanBlockHash{channel=0, seenFalse=true, seenTrue=true, seenNull=true}") + ); + assertOrds( + ordsAndKeys.ords, + new int[] { 1 }, + new int[] { 1, 2 }, + new int[] { 1, 2 }, // Order is not preserved + new int[] { 2 }, + new int[] { 0 }, + new int[] { 1, 2 } + ); + assertKeys(ordsAndKeys.keys, null, false, true); + } + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); + }, builder); } - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 3))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testLongLongHash() { long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = BlockFactory.getNonBreakingInstance().newLongArrayVector(values1, values1.length).asBlock(); long[] values2 = new long[] { 0, 0, 0, 1, 1, 1 }; - LongBlock block2 = BlockFactory.getNonBreakingInstance().newLongArrayVector(values2, values2.length).asBlock(); - Object[][] expectedKeys = { new Object[] { 0L, 0L }, new Object[] { 1L, 0L }, new Object[] { 1L, 1L }, new Object[] { 0L, 1L } }; - - OrdsAndKeys ordsAndKeys = hash(block1, block2); - assertThat( - ordsAndKeys.description, - forcePackedHash - ? startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=4, size=") - : equalTo("LongLongBlockHash{channels=[0,1], entries=4}") + hash(ordsAndKeys -> { + Object[][] expectedKeys = { + new Object[] { 0L, 0L }, + new Object[] { 1L, 0L }, + new Object[] { 1L, 1L }, + new Object[] { 0L, 1L } }; + + assertThat( + ordsAndKeys.description, + forcePackedHash + ? 
startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=4, size=") + : equalTo("LongLongBlockHash{channels=[0,1], entries=4}") + ); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); + assertKeys(ordsAndKeys.keys, expectedKeys); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, + blockFactory.newLongArrayVector(values1, values1.length).asBlock(), + blockFactory.newLongArrayVector(values2, values2.length).asBlock() ); - assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); - assertKeys(ordsAndKeys.keys, expectedKeys); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } private void append(LongBlock.Builder b1, LongBlock.Builder b2, long[] v1, long[] v2) { @@ -692,289 +655,282 @@ private void append(LongBlock.Builder b1, LongBlock.Builder b2, long[] v1, long[ } public void testLongLongHashWithMultiValuedFields() { - var b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(8); - var b2 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(8); - append(b1, b2, new long[] { 1, 2 }, new long[] { 10, 20 }); - append(b1, b2, new long[] { 1, 2 }, new long[] { 10 }); - append(b1, b2, new long[] { 1 }, new long[] { 10, 20 }); - append(b1, b2, new long[] { 1 }, new long[] { 10 }); - append(b1, b2, null, new long[] { 10 }); - append(b1, b2, new long[] { 1 }, null); - append(b1, b2, new long[] { 1, 1, 1 }, new long[] { 10, 10, 10 }); - append(b1, b2, new long[] { 1, 1, 2, 2 }, new long[] { 10, 20, 20 }); - append(b1, b2, new long[] { 1, 2, 3 }, new long[] { 30, 30, 10 }); - - Block block1 = b1.build(); - Block block2 = b2.build(); - OrdsAndKeys ordsAndKeys = hash(block1, block2); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=10, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0, 1, 2, 3 }, - new int[] { 0, 2 }, - new int[] { 0, 1 }, - new int[] { 0 }, - new int[] { 4 }, - new int[] { 5 }, - new int[] { 0 }, - new int[] { 0, 1, 2, 3 }, - new int[] { 6, 0, 7, 2, 8, 9 } - ); - assertKeys( - ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, 10L }, - new Object[] { 1L, 20L }, - new Object[] { 2L, 10L }, - new Object[] { 2L, 20L }, - new Object[] { null, 10L }, - new Object[] { 1L, null }, - new Object[] { 1L, 30L }, - new Object[] { 2L, 30L }, - new Object[] { 3L, 30L }, - new Object[] { 3L, 10L }, } - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 10))); - } else { - assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=8}")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0, 1, 2, 3 }, - new int[] { 0, 2 }, - new int[] { 0, 1 }, - new int[] { 0 }, - null, - null, - new int[] { 0 }, - new int[] { 0, 1, 2, 3 }, - new int[] { 4, 0, 5, 2, 6, 7 } - ); - assertKeys( - ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, 10L }, - new Object[] { 1L, 20L }, - new Object[] { 2L, 10L }, - new Object[] { 2L, 20L }, - new Object[] { 1L, 30L }, - new Object[] { 2L, 30L }, - new Object[] { 3L, 30L }, - new Object[] { 3L, 10L }, } - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 8))); + try (LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(8); LongBlock.Builder b2 = blockFactory.newLongBlockBuilder(8)) { + append(b1, b2, new long[] { 1, 2 }, new long[] { 10, 20 }); + append(b1, b2, new long[] { 1, 2 }, new long[] { 10 }); + append(b1, b2, new long[] { 1 
}, new long[] { 10, 20 }); + append(b1, b2, new long[] { 1 }, new long[] { 10 }); + append(b1, b2, null, new long[] { 10 }); + append(b1, b2, new long[] { 1 }, null); + append(b1, b2, new long[] { 1, 1, 1 }, new long[] { 10, 10, 10 }); + append(b1, b2, new long[] { 1, 1, 2, 2 }, new long[] { 10, 20, 20 }); + append(b1, b2, new long[] { 1, 2, 3 }, new long[] { 30, 30, 10 }); + + hash(ordsAndKeys -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=10, size=")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, + new int[] { 4 }, + new int[] { 5 }, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 6, 0, 7, 2, 8, 9 } + ); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, 10L }, + new Object[] { 1L, 20L }, + new Object[] { 2L, 10L }, + new Object[] { 2L, 20L }, + new Object[] { null, 10L }, + new Object[] { 1L, null }, + new Object[] { 1L, 30L }, + new Object[] { 2L, 30L }, + new Object[] { 3L, 30L }, + new Object[] { 3L, 10L }, } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 10))); + } else { + assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=8}")); + assertOrds( + ordsAndKeys.ords, + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, + null, + null, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 4, 0, 5, 2, 6, 7 } + ); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, 10L }, + new Object[] { 1L, 20L }, + new Object[] { 2L, 10L }, + new Object[] { 2L, 20L }, + new Object[] { 1L, 30L }, + new Object[] { 2L, 30L }, + new Object[] { 3L, 30L }, + new Object[] { 3L, 10L }, } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 8))); + } + }, b1, b2); } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testLongLongHashHugeCombinatorialExplosion() { long[] v1 = LongStream.range(0, 5000).toArray(); long[] v2 = LongStream.range(100, 200).toArray(); - var b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(v1.length); - var b2 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(v2.length); - append(b1, b2, v1, v2); - var block1 = b1.build(); - var block2 = b2.build(); - - int[] expectedEntries = new int[1]; - int pageSize = between(1000, 16 * 1024); - hash(ordsAndKeys -> { - int start = expectedEntries[0]; - expectedEntries[0] = Math.min(expectedEntries[0] + pageSize, v1.length * v2.length); - assertThat( - ordsAndKeys.description, - forcePackedHash - ? 
startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=" + expectedEntries[0] + ", size=") - : equalTo("LongLongBlockHash{channels=[0,1], entries=" + expectedEntries[0] + "}") - ); - assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); - assertKeys( - ordsAndKeys.keys, - IntStream.range(0, expectedEntries[0]) - .mapToObj(i -> new Object[] { v1[i / v2.length], v2[i % v2.length] }) - .toArray(l -> new Object[l][]) - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, expectedEntries[0]))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - }, pageSize, block1, block2); + try ( + LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(v1.length); + LongBlock.Builder b2 = blockFactory.newLongBlockBuilder(v2.length) + ) { + append(b1, b2, v1, v2); + int[] expectedEntries = new int[1]; + int pageSize = between(1000, 16 * 1024); + hash(ordsAndKeys -> { + int start = expectedEntries[0]; + expectedEntries[0] = Math.min(expectedEntries[0] + pageSize, v1.length * v2.length); + assertThat( + ordsAndKeys.description, + forcePackedHash + ? startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=" + expectedEntries[0] + ", size=") + : equalTo("LongLongBlockHash{channels=[0,1], entries=" + expectedEntries[0] + "}") + ); + assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); + assertKeys( + ordsAndKeys.keys, + IntStream.range(0, expectedEntries[0]) + .mapToObj(i -> new Object[] { v1[i / v2.length], v2[i % v2.length] }) + .toArray(l -> new Object[l][]) + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, expectedEntries[0]))); + }, pageSize, b1, b2); - assertThat("misconfigured test", expectedEntries[0], greaterThan(0)); - assertThat(breaker.getUsed(), is(0L)); + assertThat("misconfigured test", expectedEntries[0], greaterThan(0)); + } } public void testIntLongHash() { int[] values1 = new int[] { 0, 1, 0, 1, 0, 1 }; - IntBlock block1 = BlockFactory.getNonBreakingInstance().newIntArrayVector(values1, values1.length).asBlock(); long[] values2 = new long[] { 0, 0, 0, 1, 1, 1 }; - LongBlock block2 = BlockFactory.getNonBreakingInstance().newLongArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0, 0L }, new Object[] { 1, 0L }, new Object[] { 1, 1L }, new Object[] { 0, 1L } }; - - OrdsAndKeys ordsAndKeys = hash(block1, block2); - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:LONG], entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); - assertKeys(ordsAndKeys.keys, expectedKeys); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash(ordsAndKeys -> { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:LONG], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); + assertKeys(ordsAndKeys.keys, expectedKeys); + }, + blockFactory.newIntArrayVector(values1, values1.length).asBlock(), + blockFactory.newLongArrayVector(values2, values2.length).asBlock() + ); } public void testLongDoubleHash() { long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = BlockFactory.getNonBreakingInstance().newLongArrayVector(values1, values1.length).asBlock(); double[] values2 = new double[] { 0, 0, 
0, 1, 1, 1 }; - DoubleBlock block2 = BlockFactory.getNonBreakingInstance().newDoubleArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0L, 0d }, new Object[] { 1L, 0d }, new Object[] { 1L, 1d }, new Object[] { 0L, 1d } }; - OrdsAndKeys ordsAndKeys = hash(block1, block2); - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:DOUBLE], entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); - assertKeys(ordsAndKeys.keys, expectedKeys); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash((OrdsAndKeys ordsAndKeys) -> { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:DOUBLE], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); + assertKeys(ordsAndKeys.keys, expectedKeys); + }, + blockFactory.newLongArrayVector(values1, values1.length).asBlock(), + blockFactory.newDoubleArrayVector(values2, values2.length).asBlock() + ); } public void testIntBooleanHash() { int[] values1 = new int[] { 0, 1, 0, 1, 0, 1 }; - IntBlock block1 = BlockFactory.getNonBreakingInstance().newIntArrayVector(values1, values1.length).asBlock(); boolean[] values2 = new boolean[] { false, false, false, true, true, true }; - BooleanBlock block2 = BlockFactory.getNonBreakingInstance().newBooleanArrayVector(values2, values2.length).asBlock(); Object[][] expectedKeys = { new Object[] { 0, false }, new Object[] { 1, false }, new Object[] { 1, true }, new Object[] { 0, true } }; - - OrdsAndKeys ordsAndKeys = hash(block1, block2); - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:BOOLEAN], entries=4, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); - assertKeys(ordsAndKeys.keys, expectedKeys); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + hash((OrdsAndKeys ordsAndKeys) -> { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:INT, 1:BOOLEAN], entries=4, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); + assertKeys(ordsAndKeys.keys, expectedKeys); + }, + blockFactory.newIntArrayVector(values1, values1.length).asBlock(), + blockFactory.newBooleanArrayVector(values2, values2.length).asBlock() + ); } public void testLongLongHashWithNull() { - LongBlock.Builder b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(2); - LongBlock.Builder b2 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(2); - b1.appendLong(1); - b2.appendLong(0); - b1.appendNull(); - b2.appendNull(); - b1.appendLong(0); - b2.appendLong(1); - b1.appendLong(0); - b2.appendNull(); - b1.appendNull(); - b2.appendLong(0); - - OrdsAndKeys ordsAndKeys = hash(b1.build(), b2.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=5, size=")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); - assertKeys( - ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, 0L }, - new Object[] { null, null }, - new Object[] { 0L, 1L }, - new Object[] { 0L, null }, - new Object[] { null, 0L }, } - ); - 
assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 5))); - } else { - assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=2}")); - assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); - assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, 0L }, new Object[] { 0L, 1L } }); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + try (LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(2); LongBlock.Builder b2 = blockFactory.newLongBlockBuilder(2)) { + b1.appendLong(1); + b2.appendLong(0); + b1.appendNull(); + b2.appendNull(); + b1.appendLong(0); + b2.appendLong(1); + b1.appendLong(0); + b2.appendNull(); + b1.appendNull(); + b2.appendLong(0); + + hash((OrdsAndKeys ordsAndKeys) -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:LONG], entries=5, size=")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, 0L }, + new Object[] { null, null }, + new Object[] { 0L, 1L }, + new Object[] { 0L, null }, + new Object[] { null, 0L }, } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 5))); + } else { + assertThat(ordsAndKeys.description, equalTo("LongLongBlockHash{channels=[0,1], entries=2}")); + assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); + assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, 0L }, new Object[] { 0L, 1L } }); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + } + }, b1, b2); } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); } public void testLongBytesRefHash() { - long[] values1 = new long[] { 0, 1, 0, 1, 0, 1 }; - LongBlock block1 = BlockFactory.getNonBreakingInstance().newLongArrayVector(values1, values1.length).asBlock(); - BytesRefBlock.Builder builder = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(8); - builder.appendBytesRef(new BytesRef("cat")); - builder.appendBytesRef(new BytesRef("cat")); - builder.appendBytesRef(new BytesRef("cat")); - builder.appendBytesRef(new BytesRef("dog")); - builder.appendBytesRef(new BytesRef("dog")); - builder.appendBytesRef(new BytesRef("dog")); - BytesRefBlock block2 = builder.build(); - Object[][] expectedKeys = { - new Object[] { 0L, "cat" }, - new Object[] { 1L, "cat" }, - new Object[] { 1L, "dog" }, - new Object[] { 0L, "dog" } }; - - OrdsAndKeys ordsAndKeys = hash(block1, block2); - assertThat( - ordsAndKeys.description, - startsWith( - forcePackedHash - ? 
"PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=4, size=" - : "BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=4, size=" - ) - ); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); - assertKeys(ordsAndKeys.keys, expectedKeys); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); + try ( + LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(8); + BytesRefBlock.Builder b2 = blockFactory.newBytesRefBlockBuilder(8) + ) { + b1.appendLong(0); + b2.appendBytesRef(new BytesRef("cat")); + b1.appendLong(1); + b2.appendBytesRef(new BytesRef("cat")); + b1.appendLong(0); + b2.appendBytesRef(new BytesRef("cat")); + b1.appendLong(1); + b2.appendBytesRef(new BytesRef("dog")); + b1.appendLong(0); + b2.appendBytesRef(new BytesRef("dog")); + b1.appendLong(1); + b2.appendBytesRef(new BytesRef("dog")); + Object[][] expectedKeys = { + new Object[] { 0L, "cat" }, + new Object[] { 1L, "cat" }, + new Object[] { 1L, "dog" }, + new Object[] { 0L, "dog" } }; + + hash((OrdsAndKeys ordsAndKeys) -> { + assertThat( + ordsAndKeys.description, + startsWith( + forcePackedHash + ? "PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=4, size=" + : "BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=4, size=" + ) + ); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 0, 2, 3, 2); + assertKeys(ordsAndKeys.keys, expectedKeys); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 4))); + }, b1, b2); + } } public void testLongBytesRefHashWithNull() { - LongBlock.Builder b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(2); - BytesRefBlock.Builder b2 = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(2); - b1.appendLong(1); - b2.appendBytesRef(new BytesRef("cat")); - b1.appendNull(); - b2.appendNull(); - b1.appendLong(0); - b2.appendBytesRef(new BytesRef("dog")); - b1.appendLong(0); - b2.appendNull(); - b1.appendNull(); - b2.appendBytesRef(new BytesRef("vanish")); - - OrdsAndKeys ordsAndKeys = hash(b1.build(), b2.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=5, size=")); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); - assertKeys( - ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, "cat" }, - new Object[] { null, null }, - new Object[] { 0L, "dog" }, - new Object[] { 1L, null }, - new Object[] { null, "vanish" } } - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 5))); - } else { - assertThat( - ordsAndKeys.description, - startsWith("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=2, size=") - ); - assertThat(ordsAndKeys.description, endsWith("b}")); - assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); - assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, "cat" }, new Object[] { 0L, "dog" } }); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + try ( + LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(2); + BytesRefBlock.Builder b2 = blockFactory.newBytesRefBlockBuilder(2) + ) { + b1.appendLong(1); + b2.appendBytesRef(new BytesRef("cat")); + b1.appendNull(); + b2.appendNull(); + b1.appendLong(0); + b2.appendBytesRef(new 
BytesRef("dog")); + b1.appendLong(0); + b2.appendNull(); + b1.appendNull(); + b2.appendBytesRef(new BytesRef("vanish")); + + hash((OrdsAndKeys ordsAndKeys) -> { + if (forcePackedHash) { + assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=5, size=")); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, 1, 2, 3, 4); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, "cat" }, + new Object[] { null, null }, + new Object[] { 0L, "dog" }, + new Object[] { 1L, null }, + new Object[] { null, "vanish" } } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 5))); + } else { + assertThat( + ordsAndKeys.description, + startsWith("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=2, size=") + ); + assertThat(ordsAndKeys.description, endsWith("b}")); + assertOrds(ordsAndKeys.ords, 0, null, 1, null, null); + assertKeys(ordsAndKeys.keys, new Object[][] { new Object[] { 1L, "cat" }, new Object[] { 0L, "dog" } }); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 2))); + } + }, b1, b2); } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } private void append(LongBlock.Builder b1, BytesRefBlock.Builder b2, long[] v1, String[] v2) { @@ -1003,123 +959,128 @@ private void append(LongBlock.Builder b1, BytesRefBlock.Builder b2, long[] v1, S } public void testLongBytesRefHashWithMultiValuedFields() { - var b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(8); - var b2 = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(8); - append(b1, b2, new long[] { 1, 2 }, new String[] { "a", "b" }); - append(b1, b2, new long[] { 1, 2 }, new String[] { "a" }); - append(b1, b2, new long[] { 1 }, new String[] { "a", "b" }); - append(b1, b2, new long[] { 1 }, new String[] { "a" }); - append(b1, b2, null, new String[] { "a" }); - append(b1, b2, new long[] { 1 }, null); - append(b1, b2, new long[] { 1, 1, 1 }, new String[] { "a", "a", "a" }); - append(b1, b2, new long[] { 1, 1, 2, 2 }, new String[] { "a", "b", "b" }); - append(b1, b2, new long[] { 1, 2, 3 }, new String[] { "c", "c", "a" }); - - OrdsAndKeys ordsAndKeys = hash(b1.build(), b2.build()); - if (forcePackedHash) { - assertThat(ordsAndKeys.description, startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=10, size=")); - assertOrds( - ordsAndKeys.ords, - new int[] { 0, 1, 2, 3 }, - new int[] { 0, 2 }, - new int[] { 0, 1 }, - new int[] { 0 }, - new int[] { 4 }, - new int[] { 5 }, - new int[] { 0 }, - new int[] { 0, 1, 2, 3 }, - new int[] { 6, 0, 7, 2, 8, 9 } - ); - assertKeys( - ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, "a" }, - new Object[] { 1L, "b" }, - new Object[] { 2L, "a" }, - new Object[] { 2L, "b" }, - new Object[] { null, "a" }, - new Object[] { 1L, null }, - new Object[] { 1L, "c" }, - new Object[] { 2L, "c" }, - new Object[] { 3L, "c" }, - new Object[] { 3L, "a" }, } - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 10))); - } else { - assertThat( - ordsAndKeys.description, - equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=8, size=491b}") - ); - assertOrds( - ordsAndKeys.ords, - new int[] { 0, 1, 2, 3 }, - new int[] { 0, 1 }, - new int[] { 0, 2 }, - new int[] { 0 }, - null, - null, - new int[] { 0 }, - new int[] { 0, 1, 2, 3 }, - new int[] { 4, 5, 6, 0, 1, 7 } - ); - assertKeys( - 
ordsAndKeys.keys, - new Object[][] { - new Object[] { 1L, "a" }, - new Object[] { 2L, "a" }, - new Object[] { 1L, "b" }, - new Object[] { 2L, "b" }, - new Object[] { 1L, "c" }, - new Object[] { 2L, "c" }, - new Object[] { 3L, "c" }, - new Object[] { 3L, "a" }, } - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 8))); + try ( + LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(8); + BytesRefBlock.Builder b2 = blockFactory.newBytesRefBlockBuilder(8) + ) { + append(b1, b2, new long[] { 1, 2 }, new String[] { "a", "b" }); + append(b1, b2, new long[] { 1, 2 }, new String[] { "a" }); + append(b1, b2, new long[] { 1 }, new String[] { "a", "b" }); + append(b1, b2, new long[] { 1 }, new String[] { "a" }); + append(b1, b2, null, new String[] { "a" }); + append(b1, b2, new long[] { 1 }, null); + append(b1, b2, new long[] { 1, 1, 1 }, new String[] { "a", "a", "a" }); + append(b1, b2, new long[] { 1, 1, 2, 2 }, new String[] { "a", "b", "b" }); + append(b1, b2, new long[] { 1, 2, 3 }, new String[] { "c", "c", "a" }); + + hash((OrdsAndKeys ordsAndKeys) -> { + if (forcePackedHash) { + assertThat( + ordsAndKeys.description, + startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=10, size=") + ); + assertOrds( + ordsAndKeys.ords, + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 2 }, + new int[] { 0, 1 }, + new int[] { 0 }, + new int[] { 4 }, + new int[] { 5 }, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 6, 0, 7, 2, 8, 9 } + ); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, "a" }, + new Object[] { 1L, "b" }, + new Object[] { 2L, "a" }, + new Object[] { 2L, "b" }, + new Object[] { null, "a" }, + new Object[] { 1L, null }, + new Object[] { 1L, "c" }, + new Object[] { 2L, "c" }, + new Object[] { 3L, "c" }, + new Object[] { 3L, "a" }, } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 10))); + } else { + assertThat( + ordsAndKeys.description, + equalTo("BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=8, size=491b}") + ); + assertOrds( + ordsAndKeys.ords, + new int[] { 0, 1, 2, 3 }, + new int[] { 0, 1 }, + new int[] { 0, 2 }, + new int[] { 0 }, + null, + null, + new int[] { 0 }, + new int[] { 0, 1, 2, 3 }, + new int[] { 4, 5, 6, 0, 1, 7 } + ); + assertKeys( + ordsAndKeys.keys, + new Object[][] { + new Object[] { 1L, "a" }, + new Object[] { 2L, "a" }, + new Object[] { 1L, "b" }, + new Object[] { 2L, "b" }, + new Object[] { 1L, "c" }, + new Object[] { 2L, "c" }, + new Object[] { 3L, "c" }, + new Object[] { 3L, "a" }, } + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, 8))); + } + }, b1, b2); } - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - assertThat(breaker.getUsed(), is(0L)); } public void testBytesRefLongHashHugeCombinatorialExplosion() { long[] v1 = LongStream.range(0, 3000).toArray(); String[] v2 = LongStream.range(100, 200).mapToObj(l -> "a" + l).toArray(String[]::new); - var b1 = BlockFactory.getNonBreakingInstance().newLongBlockBuilder(v1.length); - var b2 = BlockFactory.getNonBreakingInstance().newBytesRefBlockBuilder(v2.length); - append(b1, b2, v1, v2); - - int[] expectedEntries = new int[1]; - int pageSize = between(1000, 16 * 1024); - hash(ordsAndKeys -> { - int start = expectedEntries[0]; - expectedEntries[0] = Math.min(expectedEntries[0] + pageSize, v1.length * v2.length); - assertThat( - ordsAndKeys.description, - forcePackedHash - ? 
startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=" + expectedEntries[0] + ", size=") - : startsWith( - "BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=" + expectedEntries[0] + ", size=" - ) - ); - assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); - assertKeys( - ordsAndKeys.keys, - IntStream.range(0, expectedEntries[0]) - .mapToObj( - i -> forcePackedHash - ? new Object[] { v1[i / v2.length], v2[i % v2.length] } - : new Object[] { v1[i % v1.length], v2[i / v1.length] } - ) - .toArray(l -> new Object[l][]) - ); - assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, expectedEntries[0]))); - Releasables.closeExpectNoException(ordsAndKeys.keys); - Releasables.closeExpectNoException(ordsAndKeys.nonEmpty); - }, pageSize, b1.build(), b2.build()); + try ( + LongBlock.Builder b1 = blockFactory.newLongBlockBuilder(v1.length); + BytesRefBlock.Builder b2 = blockFactory.newBytesRefBlockBuilder(v2.length); + ) { + append(b1, b2, v1, v2); + int[] expectedEntries = new int[1]; + int pageSize = between(1000, 16 * 1024); + hash(ordsAndKeys -> { + int start = expectedEntries[0]; + expectedEntries[0] = Math.min(expectedEntries[0] + pageSize, v1.length * v2.length); + assertThat( + ordsAndKeys.description, + forcePackedHash + ? startsWith("PackedValuesBlockHash{groups=[0:LONG, 1:BYTES_REF], entries=" + expectedEntries[0] + ", size=") + : startsWith( + "BytesRefLongBlockHash{keys=[BytesRefKey[channel=1], LongKey[channel=0]], entries=" + + expectedEntries[0] + + ", size=" + ) + ); + assertOrds(ordsAndKeys.ords, IntStream.range(start, expectedEntries[0]).toArray()); + assertKeys( + ordsAndKeys.keys, + IntStream.range(0, expectedEntries[0]) + .mapToObj( + i -> forcePackedHash + ? new Object[] { v1[i / v2.length], v2[i % v2.length] } + : new Object[] { v1[i % v1.length], v2[i / v1.length] } + ) + .toArray(l -> new Object[l][]) + ); + assertThat(ordsAndKeys.nonEmpty, equalTo(intRange(0, expectedEntries[0]))); + }, pageSize, b1, b2); - assertThat("misconfigured test", expectedEntries[0], greaterThan(0)); - assertThat(breaker.getUsed(), is(0L)); + assertThat("misconfigured test", expectedEntries[0], greaterThan(0)); + } } record OrdsAndKeys(String description, int positionOffset, IntBlock ords, Block[] keys, IntVector nonEmpty) {} @@ -1128,29 +1089,53 @@ record OrdsAndKeys(String description, int positionOffset, IntBlock ords, Block[ * Hash some values into a single block of group ids. If the hash produces * more than one block of group ids this will fail. */ - private OrdsAndKeys hash(Block... values) { - OrdsAndKeys[] result = new OrdsAndKeys[1]; + private void hash(Consumer callback, Block.Builder... values) { + Block[] blocks = new Block[values.length]; + for (int i = 0; i < blocks.length; i++) { + blocks[i] = values[i].build(); + } + hash(callback, blocks); + } + + /** + * Hash some values into a single block of group ids. If the hash produces + * more than one block of group ids this will fail. + */ + private void hash(Consumer callback, Block... values) { + boolean[] called = new boolean[] { false }; hash(ordsAndKeys -> { - if (result[0] != null) { + if (called[0]) { throw new IllegalStateException("hash produced more than one block"); } - result[0] = ordsAndKeys; + called[0] = true; + callback.accept(ordsAndKeys); }, 16 * 1024, values); - return result[0]; } - private void hash(Consumer callback, int emitBatchSize, Block... 
values) { - List specs = new ArrayList<>(values.length); - for (int c = 0; c < values.length; c++) { - specs.add(new HashAggregationOperator.GroupSpec(c, values[c].elementType())); + private void hash(Consumer callback, int emitBatchSize, Block.Builder... values) { + Block[] blocks = new Block[values.length]; + for (int i = 0; i < blocks.length; i++) { + blocks[i] = values[i].build(); } - DriverContext driverContext = new DriverContext(bigArrays, blockFactory); - try ( - BlockHash blockHash = forcePackedHash - ? new PackedValuesBlockHash(specs, driverContext, emitBatchSize) - : BlockHash.build(specs, driverContext, emitBatchSize, true) - ) { - hash(true, blockHash, callback, values); + hash(callback, emitBatchSize, blocks); + } + + private void hash(Consumer callback, int emitBatchSize, Block... values) { + try { + List specs = new ArrayList<>(values.length); + for (int c = 0; c < values.length; c++) { + specs.add(new HashAggregationOperator.GroupSpec(c, values[c].elementType())); + } + DriverContext driverContext = new DriverContext(bigArrays, blockFactory); + try ( + BlockHash blockHash = forcePackedHash + ? new PackedValuesBlockHash(specs, driverContext, emitBatchSize) + : BlockHash.build(specs, driverContext, emitBatchSize, true) + ) { + hash(true, blockHash, callback, values); + } + } finally { + Releasables.closeExpectNoException(values); } } @@ -1166,24 +1151,28 @@ public void add(int positionOffset, IntBlock groupIds) { blockHash.nonEmpty() ); - Set allowedOrds = new HashSet<>(); - for (int p = 0; p < result.nonEmpty.getPositionCount(); p++) { - allowedOrds.add(result.nonEmpty.getInt(p)); - } - for (int p = 0; p < result.ords.getPositionCount(); p++) { - if (result.ords.isNull(p)) { - continue; + try { + Set allowedOrds = new HashSet<>(); + for (int p = 0; p < result.nonEmpty.getPositionCount(); p++) { + allowedOrds.add(result.nonEmpty.getInt(p)); } - int start = result.ords.getFirstValueIndex(p); - int end = start + result.ords.getValueCount(p); - for (int i = start; i < end; i++) { - int ord = result.ords.getInt(i); - if (false == allowedOrds.contains(ord)) { - fail("ord is not allowed " + ord); + for (int p = 0; p < result.ords.getPositionCount(); p++) { + if (result.ords.isNull(p)) { + continue; + } + int start = result.ords.getFirstValueIndex(p); + int end = start + result.ords.getValueCount(p); + for (int i = start; i < end; i++) { + int ord = result.ords.getInt(i); + if (false == allowedOrds.contains(ord)) { + fail("ord is not allowed " + ord); + } } } + callback.accept(result); + } finally { + Releasables.close(result.keys == null ? 
null : Releasables.wrap(result.keys), result.nonEmpty); } - callback.accept(result); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index ef8d33a0148b3..0a36617f35b18 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -31,6 +31,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -577,6 +578,13 @@ public void testConstantNullBlock() { assertThat(breaker.getUsed(), is(0L)); int positionCount = randomIntBetween(1, 16 * 1024); Block block = Block.constantNullBlock(positionCount, blockFactory); + assertTrue(block.areAllValuesNull()); + assertThat(block, instanceOf(BooleanBlock.class)); + assertThat(block, instanceOf(IntBlock.class)); + assertThat(block, instanceOf(LongBlock.class)); + assertThat(block, instanceOf(DoubleBlock.class)); + assertThat(block, instanceOf(BytesRefBlock.class)); + assertNull(block.asVector()); if (randomBoolean()) { Block orig = block; block = (new ConstantNullBlock.Builder(blockFactory)).copyFrom(block, 0, block.getPositionCount()).build(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java index 6ba2482ab6d8d..0a63043ecc14b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java @@ -146,7 +146,8 @@ public BooleanBlock.Builder newBooleanBlockBuilder(int estimatedSize) { return b; } - BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { + @Override + public BooleanVector.FixedBuilder newBooleanVectorFixedBuilder(int size) { var b = super.newBooleanVectorFixedBuilder(size); track(b, trackDetail()); return b; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java new file mode 100644 index 0000000000000..ff231a0cc20e0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
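A note on the pattern the test changes above converge on: builders now come from a breaker-backed BlockFactory and are scoped with try-with-resources, so every Block's memory is tracked and handed back to the CircuitBreaker. A minimal sketch of the lifecycle, assuming a tracking factory such as the MockBlockFactory above:

    // Sketch only: builders and blocks are Releasable, so try-with-resources
    // returns their memory to the breaker deterministically.
    try (LongBlock.Builder builder = blockFactory.newLongBlockBuilder(2)) {
        builder.appendLong(1);
        builder.appendNull();
        try (LongBlock block = builder.build()) {
            // read from the block; closing it releases its memory
        }
    }
    // After everything is closed the tracking breaker should read zero:
    // assertThat(breaker.getUsed(), equalTo(0L));

This is also why the hash helpers above accept Block.Builder arguments and release the built blocks in a finally block instead of passing raw Blocks around.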
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java new file mode 100644 index 0000000000000..ff231a0cc20e0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java @@ -0,0 +1,140 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.hamcrest.Matchers.equalTo; + +public class SingletonOrdinalsBuilderTests extends ESTestCase { + public void testReader() throws IOException { + testRead(breakingDriverContext().blockFactory()); + } + + public void testReadWithCranky() throws IOException { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new CrankyCircuitBreakerService()); + BlockFactory factory = new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays); + try { + testRead(factory); + // If we made it this far cranky didn't fail us! + } catch (CircuitBreakingException e) { + logger.info("cranky", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + assertThat(factory.breaker().getUsed(), equalTo(0L)); + } + + private void testRead(BlockFactory factory) throws IOException { + int count = 1000; + try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + for (int i = 0; i < count; i++) { + for (BytesRef v : new BytesRef[] { new BytesRef("a"), new BytesRef("b"), new BytesRef("c"), new BytesRef("d") }) { + indexWriter.addDocument(List.of(new SortedDocValuesField("f", v))); + } + } + Map<String, Integer> counts = new HashMap<>(); + try (IndexReader reader = indexWriter.getReader()) { + for (LeafReaderContext ctx : reader.leaves()) { + SortedDocValues docValues = ctx.reader().getSortedDocValues("f"); + try (SingletonOrdinalsBuilder builder = new SingletonOrdinalsBuilder(factory, docValues, ctx.reader().numDocs())) { + for (int i = 0; i < ctx.reader().maxDoc(); i++) { + if (ctx.reader().getLiveDocs() == null || ctx.reader().getLiveDocs().get(i)) { + assertThat(docValues.advanceExact(i), equalTo(true)); + builder.appendOrd(docValues.ordValue()); + } + } + try (BytesRefBlock build = builder.build()) { + for (int i = 0; i < build.getPositionCount(); i++) { + counts.merge(build.getBytesRef(i, new BytesRef()).utf8ToString(), 1, (lhs, rhs) -> lhs + rhs); + } + } + } + } + } + assertMap(counts, matchesMap().entry("a", count).entry("b", count).entry("c", count).entry("d", count)); + } + } + + public void testCompactWithNulls() { + assertCompactToUnique(new int[] { -1, -1, -1, -1, 0, 1, 2 }, List.of(0, 1, 2)); + } + + public void testCompactNoNulls() { + assertCompactToUnique(new int[] { 0, 1, 2 }, List.of(0, 1, 2)); + } + + public void testCompactDups() { + assertCompactToUnique(new int[] { 0, 0, 0, 1, 2 }, List.of(0, 1, 2)); + } + + public void testCompactSkips() { + assertCompactToUnique(new int[] { 2, 7, 1000 }, List.of(2, 7, 1000)); + } + + private void assertCompactToUnique(int[] sortedOrds, List<Integer> expected) { + int uniqueLength = SingletonOrdinalsBuilder.compactToUnique(sortedOrds); + assertMap(Arrays.stream(sortedOrds).mapToObj(Integer::valueOf).limit(uniqueLength).toList(), matchesList(expected)); + } + + private final List<CircuitBreaker> breakers = new ArrayList<>(); + private final List<BlockFactory> blockFactories = new ArrayList<>(); + + /** + * A {@link DriverContext} with a breaking {@link BigArrays} and {@link BlockFactory}. + */ + protected DriverContext breakingDriverContext() { // TODO move this to driverContext once everyone supports breaking + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + BlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return new DriverContext(bigArrays, factory); + } + + @After + public void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + + for (CircuitBreaker breaker : breakers) { + for (var factory : blockFactories) { + if (factory instanceof MockBlockFactory mockBlockFactory) { + mockBlockFactory.ensureAllBlocksAreReleased(); + } + } + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } + } + +}
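The `WithCranky` variants added to this file and to the operator tests below all follow one idiom: CrankyCircuitBreakerService trips allocations at random, so a clean run and a breaker trip are both acceptable outcomes, as long as the trip carries the cranky error message (the @After hook separately verifies every breaker drains back to zero). The shape, as it appears throughout this diff:

    public void testSimpleWithCranky() {
        try {
            testSimple(this::crankyDriverContext);
            logger.info("cranky didn't break");
        } catch (CircuitBreakingException e) {
            // only the deliberate cranky failure is tolerated
            assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE));
        }
    }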
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 0696d02d87af6..d6edc903607cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; @@ -24,10 +25,11 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.junit.After; @@ -37,6 +39,7 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; +import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -115,25 +118,53 @@ protected String expectedDescriptionOfSimple() { // TODO tests for the other data partitioning configurations public void testSimple() { + testSimple(this::driverContext); + } + + public void testSimpleWithCranky() { + try { + testSimple(this::crankyDriverContext); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testSimple(Supplier<DriverContext> contexts) { int size = between(1_000, 20_000); int limit = randomBoolean() ? between(10, size) : Integer.MAX_VALUE; - testCount(size, limit); + testCount(contexts, size, limit); } public void testEmpty() { + testEmpty(this::driverContext); + } + + public void testEmptyWithCranky() { + try { + testEmpty(this::crankyDriverContext); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testEmpty(Supplier<DriverContext> contexts) { int limit = randomBoolean() ? between(10, 10000) : Integer.MAX_VALUE; - testCount(0, limit); + testCount(contexts, 0, limit); } - private void testCount(int size, int limit) { + private void testCount(Supplier<DriverContext> contexts, int size, int limit) { DataPartitioning dataPartitioning = randomFrom(DataPartitioning.values()); - LuceneCountOperator.Factory factory = simple(nonBreakingBigArrays(), dataPartitioning, size, limit); + LuceneCountOperator.Factory factory = simple(contexts.get().bigArrays(), dataPartitioning, size, limit); List<Page> results = new CopyOnWriteArrayList<>(); List<Driver> drivers = new ArrayList<>(); int taskConcurrency = between(1, 8); for (int i = 0; i < taskConcurrency; i++) { - DriverContext ctx = driverContext(); - drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new PageConsumerOperator(results::add), () -> {})); + DriverContext ctx = contexts.get(); + drivers.add(new Driver(ctx, factory.get(ctx), List.of(), new TestResultPageSinkOperator(results::add), () -> {})); } OperatorTestCase.runDriver(drivers); assertThat(results.size(), lessThanOrEqualTo(taskConcurrency));
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index bbafc8ed753cc..41fe1a93d9c8b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AnyOperatorTestCase; @@ -25,7 +25,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.fielddata.FieldDataContext; @@ -36,7 +36,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.support.NestedScope; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.junit.After; @@ -48,6 +48,7 @@ import
java.util.function.Function; import static org.hamcrest.Matchers.both; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.lessThan; @@ -58,7 +59,7 @@ import static org.mockito.Mockito.when; public class LuceneSourceOperatorTests extends AnyOperatorTestCase { - private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.INTEGER); + private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); private Directory directory = newDirectory(); private IndexReader reader; @@ -135,26 +136,53 @@ protected String expectedDescriptionOfSimple() { public void testShardDataPartitioning() { int size = between(1_000, 20_000); int limit = between(10, size); - testSimple(size, limit); + testSimple(driverContext(), size, limit); } public void testEmpty() { - testSimple(0, between(10, 10_000)); + testSimple(driverContext(), 0, between(10, 10_000)); } - private void testSimple(int size, int limit) { - DriverContext ctx = driverContext(); - LuceneSourceOperator.Factory factory = simple(nonBreakingBigArrays(), DataPartitioning.SHARD, size, limit); - Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.LONG, - S_FIELD - ); + public void testWithCranky() { + try { + testSimple(crankyDriverContext(), between(1, 10_000), 100); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + public void testEmptyWithCranky() { + try { + testSimple(crankyDriverContext(), 0, between(10, 10_000)); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + public void testShardDataPartitioningWithCranky() { + int size = between(1_000, 20_000); + int limit = between(10, size); + try { + testSimple(crankyDriverContext(), size, limit); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testSimple(DriverContext ctx, int size, int limit) { + LuceneSourceOperator.Factory factory = simple(ctx.bigArrays(), DataPartitioning.SHARD, size, limit); + Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD); List results = new ArrayList<>(); + OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new PageConsumerOperator(page -> results.add(page)), () -> {}) + new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index 54853abd0cecb..d1b9e706750df 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -15,8 +15,8 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.AnyOperatorTestCase; @@ -24,7 +24,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -34,7 +34,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.support.NestedScope; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilder; @@ -53,7 +53,7 @@ import static org.mockito.Mockito.when; public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { - private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.INTEGER); + private static final MappedFieldType S_FIELD = new NumberFieldMapper.NumberFieldType("s", NumberFieldMapper.NumberType.LONG); private Directory directory = newDirectory(); private IndexReader reader; @@ -138,28 +138,50 @@ protected String expectedDescriptionOfSimple() { // TODO tests for the other data partitioning configurations public void testShardDataPartitioning() { + testShardDataPartitioning(driverContext()); + } + + public void testShardDataPartitioningWithCranky() { + try { + testShardDataPartitioning(crankyDriverContext()); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testShardDataPartitioning(DriverContext context) { int size = between(1_000, 20_000); int limit = between(10, size); - testSimple(size, limit); + testSimple(context, size, limit); } public void testEmpty() { - testSimple(0, between(10, 10_000)); + testEmpty(driverContext()); } - private void testSimple(int size, int limit) { - DriverContext ctx = driverContext(); - LuceneTopNSourceOperator.Factory factory = simple(nonBreakingBigArrays(), DataPartitioning.SHARD, size, limit); - Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.LONG, - S_FIELD - ); + public void testEmptyWithCranky() { + try { + testEmpty(crankyDriverContext()); + logger.info("cranky didn't break"); + } catch (CircuitBreakingException e) { + logger.info("broken", e); + assertThat(e.getMessage(), equalTo(CrankyCircuitBreakerService.ERROR_MESSAGE)); + } + } + + private void testEmpty(DriverContext context) { + 
testSimple(context, 0, between(10, 10_000)); + } + + private void testSimple(DriverContext ctx, int size, int limit) { + LuceneTopNSourceOperator.Factory factory = simple(ctx.bigArrays(), DataPartitioning.SHARD, size, limit); + Operator.OperatorFactory readS = ValuesSourceReaderOperatorTests.factory(reader, S_FIELD); List results = new ArrayList<>(); OperatorTestCase.runDriver( - new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new PageConsumerOperator(page -> results.add(page)), () -> {}) + new Driver(ctx, factory.get(ctx), List.of(readS.get(ctx)), new TestResultPageSinkOperator(results::add), () -> {}) ); OperatorTestCase.assertDriverContext(ctx); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index ec1697e9aedd2..269a478560bac 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.DoubleVector; -import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; @@ -47,18 +46,10 @@ import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.fielddata.FieldDataContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; -import org.elasticsearch.search.aggregations.support.FieldContext; -import org.elasticsearch.search.aggregations.support.ValuesSource; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.junit.After; import java.io.IOException; @@ -95,21 +86,12 @@ public void closeIndex() throws IOException { @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.LONG, - new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG) - ); + return factory(reader, new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG)); } - static Operator.OperatorFactory factory(IndexReader reader, ValuesSourceType vsType, ElementType elementType, MappedFieldType ft) { - IndexFieldData fd = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")) - .build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - FieldContext fc = new FieldContext(ft.name(), fd, ft); - ValuesSource vs = vsType.getField(fc, null); + static Operator.OperatorFactory factory(IndexReader reader, MappedFieldType ft) { return new ValuesSourceReaderOperator.ValuesSourceReaderOperatorFactory( - () -> List.of(new 
ValueSourceInfo(vsType, vs, elementType, reader)), + List.of(BlockReaderFactories.loaderToFactory(reader, ft.blockLoader(null))), 0, ft.name() ); @@ -241,68 +223,19 @@ public void testLoadAllInOnePageShuffled() { } private void loadSimpleAndAssert(DriverContext driverContext, List input) { - List results = new ArrayList<>(); List operators = List.of( - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.INT, - new NumberFieldMapper.NumberFieldType("key", NumberFieldMapper.NumberType.INTEGER) - ).get(driverContext), - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.LONG, - new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG) - ).get(driverContext), - factory(reader, CoreValuesSourceType.KEYWORD, ElementType.BYTES_REF, new KeywordFieldMapper.KeywordFieldType("kwd")).get( - driverContext - ), - factory(reader, CoreValuesSourceType.KEYWORD, ElementType.BYTES_REF, new KeywordFieldMapper.KeywordFieldType("mv_kwd")).get( - driverContext - ), - factory(reader, CoreValuesSourceType.BOOLEAN, ElementType.BOOLEAN, new BooleanFieldMapper.BooleanFieldType("bool")).get( - driverContext - ), - factory(reader, CoreValuesSourceType.BOOLEAN, ElementType.BOOLEAN, new BooleanFieldMapper.BooleanFieldType("mv_bool")).get( - driverContext - ), - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.INT, - new NumberFieldMapper.NumberFieldType("mv_key", NumberFieldMapper.NumberType.INTEGER) - ).get(driverContext), - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.LONG, - new NumberFieldMapper.NumberFieldType("mv_long", NumberFieldMapper.NumberType.LONG) - ).get(driverContext), - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.DOUBLE, - new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE) - ).get(driverContext), - factory( - reader, - CoreValuesSourceType.NUMERIC, - ElementType.DOUBLE, - new NumberFieldMapper.NumberFieldType("mv_double", NumberFieldMapper.NumberType.DOUBLE) - ).get(driverContext) + factory(reader, new NumberFieldMapper.NumberFieldType("key", NumberFieldMapper.NumberType.INTEGER)).get(driverContext), + factory(reader, new NumberFieldMapper.NumberFieldType("long", NumberFieldMapper.NumberType.LONG)).get(driverContext), + factory(reader, new KeywordFieldMapper.KeywordFieldType("kwd")).get(driverContext), + factory(reader, new KeywordFieldMapper.KeywordFieldType("mv_kwd")).get(driverContext), + factory(reader, new BooleanFieldMapper.BooleanFieldType("bool")).get(driverContext), + factory(reader, new BooleanFieldMapper.BooleanFieldType("mv_bool")).get(driverContext), + factory(reader, new NumberFieldMapper.NumberFieldType("mv_key", NumberFieldMapper.NumberType.INTEGER)).get(driverContext), + factory(reader, new NumberFieldMapper.NumberFieldType("mv_long", NumberFieldMapper.NumberType.LONG)).get(driverContext), + factory(reader, new NumberFieldMapper.NumberFieldType("double", NumberFieldMapper.NumberType.DOUBLE)).get(driverContext), + factory(reader, new NumberFieldMapper.NumberFieldType("mv_double", NumberFieldMapper.NumberType.DOUBLE)).get(driverContext) ); - try ( - Driver d = new Driver( - driverContext, - new CannedSourceOperator(input.iterator()), - operators, - new PageConsumerOperator(page -> results.add(page)), - () -> {} - ) - ) { - runDriver(d); - } + List results = drive(operators, input.iterator(), driverContext); assertThat(results, hasSize(input.size())); for (Page p : results) { assertThat(p.getBlockCount(), equalTo(11)); @@ -418,26 +351,30 @@ public void 
testValuesSourceReaderOperatorWithNulls() throws IOException { driverContext, luceneFactory.get(driverContext), List.of( - factory(reader, CoreValuesSourceType.NUMERIC, ElementType.INT, intFt).get(driverContext), - factory(reader, CoreValuesSourceType.NUMERIC, ElementType.LONG, longFt).get(driverContext), - factory(reader, CoreValuesSourceType.NUMERIC, ElementType.DOUBLE, doubleFt).get(driverContext), - factory(reader, CoreValuesSourceType.KEYWORD, ElementType.BYTES_REF, kwFt).get(driverContext) + factory(reader, intFt).get(driverContext), + factory(reader, longFt).get(driverContext), + factory(reader, doubleFt).get(driverContext), + factory(reader, kwFt).get(driverContext) ), new PageConsumerOperator(page -> { - logger.debug("New page: {}", page); - IntBlock intValuesBlock = page.getBlock(1); - LongBlock longValuesBlock = page.getBlock(2); - DoubleBlock doubleValuesBlock = page.getBlock(3); - BytesRefBlock keywordValuesBlock = page.getBlock(4); + try { + logger.debug("New page: {}", page); + IntBlock intValuesBlock = page.getBlock(1); + LongBlock longValuesBlock = page.getBlock(2); + DoubleBlock doubleValuesBlock = page.getBlock(3); + BytesRefBlock keywordValuesBlock = page.getBlock(4); - for (int i = 0; i < page.getPositionCount(); i++) { - assertFalse(intValuesBlock.isNull(i)); - long j = intValuesBlock.getInt(i); - // Every 100 documents we set fields to null - boolean fieldIsEmpty = j % 100 == 0; - assertEquals(fieldIsEmpty, longValuesBlock.isNull(i)); - assertEquals(fieldIsEmpty, doubleValuesBlock.isNull(i)); - assertEquals(fieldIsEmpty, keywordValuesBlock.isNull(i)); + for (int i = 0; i < page.getPositionCount(); i++) { + assertFalse(intValuesBlock.isNull(i)); + long j = intValuesBlock.getInt(i); + // Every 100 documents we set fields to null + boolean fieldIsEmpty = j % 100 == 0; + assertEquals(fieldIsEmpty, longValuesBlock.isNull(i)); + assertEquals(fieldIsEmpty, doubleValuesBlock.isNull(i)); + assertEquals(fieldIsEmpty, keywordValuesBlock.isNull(i)); + } + } finally { + page.releaseBlocks(); } }), () -> {} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 00c054ef0031a..290756e81cfae 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -16,6 +16,7 @@ import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -34,7 +35,7 @@ public abstract class AnyOperatorTestCase extends ESTestCase { * The operator configured a "simple" or basic way, used for smoke testing * descriptions and {@link BigArrays} and scatter/gather. */ - protected abstract Operator.OperatorFactory simple(BigArrays bigArrays); + protected abstract Operator.OperatorFactory simple(BigArrays bigArrays); // TODO remove BigArrays - that's part of the context /** * The description of the operator produced by {@link #simple}. @@ -100,23 +101,30 @@ protected final BigArrays nonBreakingBigArrays() { /** * A {@link DriverContext} with a nonBreakingBigArrays. 
*/ - protected DriverContext driverContext() { // TODO make this final and return a breaking block factory + protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + BlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return new DriverContext(bigArrays, factory); + } + + protected final DriverContext nonBreakingDriverContext() { // TODO drop this once the driverContext method isn't overrideable return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); } private final List breakers = new ArrayList<>(); private final List blockFactories = new ArrayList<>(); - /** - * A {@link DriverContext} with a breaking {@link BigArrays} and {@link BlockFactory}. - */ - protected DriverContext breakingDriverContext() { // TODO move this to driverContext once everyone supports breaking - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); + protected final DriverContext crankyDriverContext() { + CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - BlockFactory factory = new MockBlockFactory(breaker, bigArrays); - blockFactories.add(factory); - return new DriverContext(bigArrays, factory); + BlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(blockFactory); + return new DriverContext(bigArrays, blockFactory); } @After diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index f5dd2680e0ac7..47febc09e45f5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -7,8 +7,11 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import java.util.ArrayList; import java.util.Iterator; @@ -42,19 +45,32 @@ public static Page mergePages(List pages) { int totalPositions = pages.stream().mapToInt(Page::getPositionCount).sum(); Page first = pages.get(0); Block.Builder[] builders = new Block.Builder[first.getBlockCount()]; - for (int b = 0; b < builders.length; b++) { - builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions); - } - for (Page p : pages) { + try { for (int b = 0; b < builders.length; b++) { - builders[b].copyFrom(p.getBlock(b), 0, p.getPositionCount()); + builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions); } + for (Page p : pages) { + for (int b = 0; b < builders.length; b++) { + builders[b].copyFrom(p.getBlock(b), 0, p.getPositionCount()); + } + } + Block[] blocks = new 
Block[builders.length]; + Page result = null; + try { + for (int b = 0; b < blocks.length; b++) { + blocks[b] = builders[b].build(); + } + result = new Page(blocks); + } finally { + if (result == null) { + Releasables.close(blocks); + } + } + return result; + } finally { + Iterable<Releasable> releasePages = () -> Iterators.map(pages.iterator(), p -> p::releaseBlocks); + Releasables.closeExpectNoException(Releasables.wrap(builders), Releasables.wrap(releasePages)); } - Block[] blocks = new Block[builders.length]; - for (int b = 0; b < blocks.length; b++) { - blocks[b] = builders[b].build(); - } - return new Page(blocks); } /**
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 7680fd410a709..6906e5f3adda8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -22,6 +22,8 @@ import java.util.stream.Collectors; import java.util.stream.LongStream; +import static org.hamcrest.Matchers.equalTo; + public class ColumnExtractOperatorTests extends OperatorTestCase { @Override @@ -53,7 +55,13 @@ protected Operator.OperatorFactory simple(BigArrays bigArrays) { dvrCtx -> new EvalOperator.ExpressionEvaluator() { @Override public Block.Ref eval(Page page) { - return new Block.Ref(page.getBlock(0), page); + BytesRefBlock input = page.getBlock(0); + for (int i = 0; i < input.getPositionCount(); i++) { + if (input.getBytesRef(i, new BytesRef()).utf8ToString().startsWith("no_")) { + return Block.Ref.floating(Block.constantNullBlock(input.getPositionCount(), input.blockFactory())); + } + } + return new Block.Ref(input, page); } @Override @@ -91,4 +99,17 @@ protected void assertSimpleOutput(List<Page> input, List<Page> results) { protected ByteSizeValue smallEnoughToCircuitBreak() { return ByteSizeValue.ofBytes(between(1, 32)); } + + public void testAllNullValues() { + DriverContext driverContext = driverContext(); + BytesRef scratch = new BytesRef(); + Block input1 = BytesRefBlock.newBlockBuilder(1, driverContext.blockFactory()).appendBytesRef(new BytesRef("can_match")).build(); + Block input2 = BytesRefBlock.newBlockBuilder(1, driverContext.blockFactory()).appendBytesRef(new BytesRef("no_match")).build(); + List<Page> inputPages = List.of(new Page(input1), new Page(input2)); + List<Page> outputPages = drive(simple(driverContext.bigArrays()).get(driverContext), inputPages.iterator(), driverContext); + BytesRefBlock output1 = outputPages.get(0).getBlock(1); + BytesRefBlock output2 = outputPages.get(1).getBlock(1); + assertThat(output1.getBytesRef(0, scratch), equalTo(new BytesRef("can_match"))); + assertTrue(output2.areAllValuesNull()); + } }
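The DriverTests change below replaces the SubscribableListener/PlainActionFuture plumbing with a CountDownLatch: the assertions move inside the completion listener, so they run on the thread that finishes the driver, and the try/finally guarantees the waiting test thread is always released even when an assertion fails. In outline:

    // Outline of the new completion handling in testThreadContext.
    CountDownLatch latch = new CountDownLatch(1);
    Driver.start(threadContext, threadPool.executor("esql"), driver, between(1, 1000), ActionListener.running(() -> {
        try {
            // thread-context and output-page assertions run here
        } finally {
            latch.countDown(); // always release the waiting test thread
        }
    }));
    assertTrue(latch.await(30, TimeUnit.SECONDS));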
org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; @@ -34,12 +32,14 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import static org.hamcrest.Matchers.equalTo; public class DriverTests extends ESTestCase { - public void testThreadContext() { + public void testThreadContext() throws Exception { DriverContext driverContext = driverContext(); ThreadPool threadPool = threadPool(); try { @@ -58,27 +58,28 @@ public Page getOutput() { outPages.add(page); }), () -> {}); ThreadContext threadContext = threadPool.getThreadContext(); - SubscribableListener future = new SubscribableListener<>(); + CountDownLatch latch = new CountDownLatch(1); try (ThreadContext.StoredContext ignored = threadContext.stashContext()) { threadContext.putHeader("user", "user1"); - Driver.start(threadContext, threadPool.executor("esql"), driver, between(1, 1000), future); + Driver.start(threadContext, threadPool.executor("esql"), driver, between(1, 1000), ActionListener.running(() -> { + try { + assertRunningWithRegularUser(threadPool); + assertThat(outPages, equalTo(inPages)); + Map> actualResponseHeaders = new HashMap<>(); + for (Map.Entry> e : threadPool.getThreadContext().getResponseHeaders().entrySet()) { + actualResponseHeaders.put(e.getKey(), Sets.newHashSet(e.getValue())); + } + Map> expectedResponseHeaders = new HashMap<>(warning1.warnings); + for (Map.Entry> e : warning2.warnings.entrySet()) { + expectedResponseHeaders.merge(e.getKey(), e.getValue(), Sets::union); + } + assertThat(actualResponseHeaders, equalTo(expectedResponseHeaders)); + } finally { + latch.countDown(); + } + })); } - future.addListener(ActionListener.running(() -> { - assertRunningWithRegularUser(threadPool); - assertThat(outPages, equalTo(inPages)); - Map> actualResponseHeaders = new HashMap<>(); - for (Map.Entry> e : threadPool.getThreadContext().getResponseHeaders().entrySet()) { - actualResponseHeaders.put(e.getKey(), Sets.newHashSet(e.getValue())); - } - Map> expectedResponseHeaders = new HashMap<>(warning1.warnings); - for (Map.Entry> e : warning2.warnings.entrySet()) { - expectedResponseHeaders.merge(e.getKey(), e.getValue(), Sets::union); - } - assertThat(actualResponseHeaders, equalTo(expectedResponseHeaders)); - })); - PlainActionFuture completion = new PlainActionFuture<>(); - future.addListener(completion); - completion.actionGet(TimeValue.timeValueSeconds(30)); + assertTrue(latch.await(30, TimeUnit.SECONDS)); } finally { terminate(threadPool); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index b017535676b87..e7f5db7579869 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -66,7 +66,17 @@ public void close() {} @Override protected Operator.OperatorFactory simple(BigArrays bigArrays) { - return new EvalOperator.EvalOperatorFactory(dvrCtx -> new Addition(dvrCtx, 0, 1)); + return new EvalOperator.EvalOperatorFactory(new EvalOperator.ExpressionEvaluator.Factory() { + @Override + public EvalOperator.ExpressionEvaluator get(DriverContext 
context) { + return new Addition(context, 0, 1); + } + + @Override + public String toString() { + return "Addition[lhs=0, rhs=1]"; + } + }); } @Override @@ -108,9 +118,4 @@ public void testReadFromBlock() { protected ByteSizeValue smallEnoughToCircuitBreak() { return ByteSizeValue.ofBytes(between(1, 8000)); } - - @Override - protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory - return breakingDriverContext(); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index fa4c7bea7c9cc..e16f643e1ca4d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -119,9 +119,4 @@ public void testReadFromBlock() { protected ByteSizeValue smallEnoughToCircuitBreak() { return ByteSizeValue.ofBytes(between(1, 600)); } - - @Override - protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory - return breakingDriverContext(); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 1a2c87aff1591..9403d22f2b4c4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -71,7 +71,7 @@ public final void testInitialFinal() { simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext), simpleWithMode(bigArrays, AggregatorMode.FINAL).get(driverContext) ), - new ResultPageSinkOperator(page -> results.add(page)), + new TestResultPageSinkOperator(page -> results.add(page)), () -> {} ) ) { @@ -93,7 +93,7 @@ public final void testManyInitialFinal() { driverContext, new CannedSourceOperator(partials.iterator()), List.of(simpleWithMode(bigArrays, AggregatorMode.FINAL).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -119,7 +119,7 @@ public final void testInitialIntermediateFinal() { simpleWithMode(bigArrays, AggregatorMode.INTERMEDIATE).get(driverContext), simpleWithMode(bigArrays, AggregatorMode.FINAL).get(driverContext) ), - new ResultPageSinkOperator(page -> results.add(page)), + new TestResultPageSinkOperator(page -> results.add(page)), () -> {} ) ) { @@ -148,7 +148,7 @@ public final void testManyInitialManyPartialFinal() { driverContext, new CannedSourceOperator(intermediates.iterator()), List.of(simpleWithMode(bigArrays, AggregatorMode.FINAL).get(driverContext)), - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { @@ -255,7 +255,7 @@ List createDriversForInput(BigArrays bigArrays, List input, List
    {} ) ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java index afa307c494431..b1ef784ca339c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/HashAggregationOperatorTests.java @@ -31,12 +31,6 @@ import static org.hamcrest.Matchers.hasSize; public class HashAggregationOperatorTests extends ForkingOperatorTestCase { - - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } - @Override protected SourceOperator simpleInput(BlockFactory blockFactory, int size) { long max = randomLongBetween(1, Long.MAX_VALUE / size); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index 76f99389a697b..8c85f5927196f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -23,11 +23,6 @@ import static org.hamcrest.Matchers.sameInstance; public class LimitOperatorTests extends OperatorTestCase { - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } - @Override protected LimitOperator.Factory simple(BigArrays bigArrays) { return new LimitOperator.Factory(100); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index a105818a2bf03..3572dc620287d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -257,9 +257,4 @@ protected Page createPage(int positionOffset, int length) { List results = drive(new MvExpandOperator(0, randomIntBetween(1, 1000)), input.iterator(), context); assertSimpleOutput(origInput, results); } - - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 50b97021c216b..fe6061ee90779 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java @@ -124,14 +124,11 @@ public final void testSimpleWithCranky() { DriverContext inputFactoryContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(inputFactoryContext.blockFactory(), between(1_000, 10_000))); - CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); - BlockFactory blockFactory = BlockFactory.getInstance(cranky.getBreaker(CircuitBreaker.REQUEST), bigArrays); - DriverContext driverContext = new 
DriverContext(bigArrays, blockFactory); + DriverContext driverContext = crankyDriverContext(); boolean driverStarted = false; try { - Operator operator = simple(bigArrays).get(driverContext); + Operator operator = simple(driverContext.bigArrays()).get(driverContext); driverStarted = true; drive(operator, input.iterator(), driverContext); // Either we get lucky and cranky doesn't throw and the test completes or we don't and it throws @@ -145,7 +142,6 @@ public final void testSimpleWithCranky() { } // Note the lack of try/finally here - we're asserting that when the driver throws an exception we clear the breakers. - assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L)); assertThat(inputFactoryContext.breaker().getUsed(), equalTo(0L)); } @@ -248,7 +244,7 @@ protected final List drive(List operators, Iterator input, driverContext, new CannedSourceOperator(input), operators, - new ResultPageSinkOperator(results::add), + new TestResultPageSinkOperator(results::add), () -> {} ) ) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index 1acdbc4895c94..1aff6be7594aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -28,11 +28,6 @@ import static org.mockito.Mockito.when; public class ProjectOperatorTests extends OperatorTestCase { - @Override - protected DriverContext driverContext() { - return breakingDriverContext(); - } - public void testProjectionOnEmptyPage() { var page = new Page(0); var projection = new ProjectOperator(randomProjection(10)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 5dff00ec930c4..c55fbeb29a25e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -99,31 +99,35 @@ public Block.Ref eval(Page page) { public void close() {} }, new FirstWord("test"), driverContext()); - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(1); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("foo1 bar1")); - builder.appendBytesRef(new BytesRef("foo2 bar2")); - builder.endPositionEntry(); - builder.beginPositionEntry(); - builder.appendBytesRef(new BytesRef("foo3 bar3")); - builder.appendBytesRef(new BytesRef("foo4 bar4")); - builder.appendBytesRef(new BytesRef("foo5 bar5")); - builder.endPositionEntry(); - Page page = new Page(builder.build()); - - Page result = operator.process(page); - Block resultBlock = result.getBlock(1); - assertThat(resultBlock.getPositionCount(), equalTo(2)); - assertThat(resultBlock.getValueCount(0), equalTo(2)); - assertThat(resultBlock.getValueCount(1), equalTo(3)); - BytesRefBlock brb = (BytesRefBlock) resultBlock; - BytesRef spare = new BytesRef(""); - int idx = brb.getFirstValueIndex(0); - assertThat(brb.getBytesRef(idx, spare).utf8ToString(), equalTo("foo1")); - assertThat(brb.getBytesRef(idx + 1, spare).utf8ToString(), equalTo("foo2")); - idx = brb.getFirstValueIndex(1); - 
assertThat(brb.getBytesRef(idx, spare).utf8ToString(), equalTo("foo3")); - assertThat(brb.getBytesRef(idx + 1, spare).utf8ToString(), equalTo("foo4")); - assertThat(brb.getBytesRef(idx + 2, spare).utf8ToString(), equalTo("foo5")); + Page result = null; + try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(1)) { + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("foo1 bar1")); + builder.appendBytesRef(new BytesRef("foo2 bar2")); + builder.endPositionEntry(); + builder.beginPositionEntry(); + builder.appendBytesRef(new BytesRef("foo3 bar3")); + builder.appendBytesRef(new BytesRef("foo4 bar4")); + builder.appendBytesRef(new BytesRef("foo5 bar5")); + builder.endPositionEntry(); + result = operator.process(new Page(builder.build())); + } + try { + Block resultBlock = result.getBlock(1); + assertThat(resultBlock.getPositionCount(), equalTo(2)); + assertThat(resultBlock.getValueCount(0), equalTo(2)); + assertThat(resultBlock.getValueCount(1), equalTo(3)); + BytesRefBlock brb = (BytesRefBlock) resultBlock; + BytesRef spare = new BytesRef(""); + int idx = brb.getFirstValueIndex(0); + assertThat(brb.getBytesRef(idx, spare).utf8ToString(), equalTo("foo1")); + assertThat(brb.getBytesRef(idx + 1, spare).utf8ToString(), equalTo("foo2")); + idx = brb.getFirstValueIndex(1); + assertThat(brb.getBytesRef(idx, spare).utf8ToString(), equalTo("foo3")); + assertThat(brb.getBytesRef(idx + 1, spare).utf8ToString(), equalTo("foo4")); + assertThat(brb.getBytesRef(idx + 2, spare).utf8ToString(), equalTo("foo5")); + } finally { + result.releaseBlocks(); + } } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResultPageSinkOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java similarity index 85% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResultPageSinkOperator.java rename to x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java index 69e7a78ae5c97..aaa3a6ac8a3c8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ResultPageSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java @@ -17,9 +17,9 @@ * Page Consumer operator that deep copies the input page, closes it, and then passes the copy * to the underlying page consumer. 
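 * <p>Reviewer sketch, not part of this diff: the deep-copy-and-release contract described
 * above is what lets a test keep reading result pages after every tracked block has been
 * freed. A minimal sketch, assuming the OperatorTestCase helpers that appear elsewhere in
 * this diff (driverContext(), simpleInput(...), simple(...), drive(...)):
 *
 *   DriverContext ctx = driverContext();
 *   // input pages are built with the tracked, breaking block factory
 *   List<Page> input = CannedSourceOperator.collectPages(simpleInput(ctx.blockFactory(), 1_000));
 *   // drive(...) sinks results through this operator, deep-copying each page into
 *   // untracked memory and releasing the tracked original
 *   List<Page> results = drive(simple(ctx.bigArrays()).get(ctx), input.iterator(), ctx);
 *   assertThat(ctx.breaker().getUsed(), equalTo(0L)); // originals released; copies still readable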
*/ -public class ResultPageSinkOperator extends PageConsumerOperator { +public class TestResultPageSinkOperator extends PageConsumerOperator { - public ResultPageSinkOperator(Consumer pageConsumer) { + public TestResultPageSinkOperator(Consumer pageConsumer) { super(page -> { Page copy = BlockTestUtils.deepCopyOf(page, BlockFactory.getNonBreakingInstance()); page.releaseBlocks(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java index 4c975c6c07834..a006a984eb178 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.test.ESTestCase; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -38,11 +37,7 @@ public void testDrainPages() throws Exception { AtomicInteger addedPages = new AtomicInteger(); for (int t = 0; t < producers.length; t++) { producers[t] = new Thread(() -> { - try { - latch.await(10, TimeUnit.SECONDS); - } catch (InterruptedException e) { - throw new AssertionError(e); - } + safeAwait(latch); while (stopped.get() == false && addedPages.incrementAndGet() < 10_000) { buffer.addPage(randomPage(blockFactory)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index 049bc449069b8..6c5bab9b8f784 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -1359,11 +1359,6 @@ public void testCloseWithoutCompleting() { } } - @Override - protected DriverContext driverContext() { // TODO remove this when the parent uses a breaking block factory - return breakingDriverContext(); - } - @SuppressWarnings({ "unchecked", "rawtypes" }) private static void readAsRows(List>> values, Page page) { if (page.getBlockCount() == 0) { diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 0cd9570927635..10c77a05af49b 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -264,6 +264,9 @@ private Response runESQLCommand(String user, String command) throws IOException if (randomBoolean()) { settings.put("data_partitioning", randomFrom("shard", "segment", "doc")); } + if (randomBoolean()) { + settings.put("enrich_max_workers", between(1, 5)); + } pragmas = settings.build(); } XContentBuilder query = JsonXContent.contentBuilder(); diff --git a/x-pack/plugin/esql/qa/server/single-node/build.gradle b/x-pack/plugin/esql/qa/server/single-node/build.gradle index 792059639aaca..3131b4176ee25 100644 --- a/x-pack/plugin/esql/qa/server/single-node/build.gradle +++ 
b/x-pack/plugin/esql/qa/server/single-node/build.gradle @@ -7,7 +7,7 @@ dependencies { restResources { restApi { - include '_common', 'bulk', 'indices', 'esql', 'xpack', 'enrich', 'cluster' + include '_common', 'bulk', 'get', 'indices', 'esql', 'xpack', 'enrich', 'cluster' } } diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java index eed96a007a1b9..31d0a7646e1b7 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/HeapAttackIT.java @@ -290,7 +290,6 @@ public void testFetchManyBigFields() throws IOException { fetchManyBigFields(100); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100528") public void testFetchTooManyBigFields() throws IOException { initManyBigFieldsIndex(500); assertCircuitBreaks(() -> fetchManyBigFields(500)); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEnrichIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEnrichIT.java new file mode 100644 index 0000000000000..b2222f4f2e78e --- /dev/null +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEnrichIT.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.single_node; + +import org.elasticsearch.xpack.esql.qa.rest.RestEnrichTestCase; + +public class RestEnrichIT extends RestEnrichTestCase {} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java index efb7192bbc3e8..10e63a563efc7 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java +++ b/x-pack/plugin/esql/qa/server/single-node/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/single_node/RestEsqlIT.java @@ -54,7 +54,14 @@ public void testBasicEsql() throws IOException { public void testInvalidPragma() throws IOException { assumeTrue("pragma only enabled on snapshot builds", Build.current().isSnapshot()); - RequestObjectBuilder builder = new RequestObjectBuilder().query("row a = 1, b = 2"); + createIndex("test-index"); + for (int i = 0; i < 10; i++) { + Request request = new Request("POST", "/test-index/_doc/"); + request.addParameter("refresh", "true"); + request.setJsonEntity("{\"f\":" + i + "}"); + assertOK(client().performRequest(request)); + } + RequestObjectBuilder builder = new RequestObjectBuilder().query("from test-index | limit 1 | keep f"); builder.pragmas(Settings.builder().put("data_partitioning", "invalid-option").build()); builder.build(); ResponseException re = expectThrows(ResponseException.class, () -> runEsql(builder)); diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/30_types.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/30_types.yml index 886bb6dc60aca..bf159455d00ca 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/30_types.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/30_types.yml @@ -53,6 +53,39 @@ constant_keyword: - length: {values: 1} - match: {values.0.0: 17} +--- +constant_keyword with null value: + - do: + indices.create: + index: test + body: + mappings: + properties: + kind: + type: constant_keyword + color: + type: keyword + + - do: + bulk: + index: test + refresh: true + body: + - { "index": { } } + - { "color": "red" } + + - do: + esql.query: + body: + query: 'from test | limit 1' + - match: { columns.0.name: color } + - match: { columns.0.type: keyword } + - match: { columns.1.name: kind } + - match: { columns.1.type: keyword } + - length: { values: 1 } + - match: { values.0.0: red } + - match: { values.0.1: null } + --- multivalued keyword: - do: @@ -112,7 +145,7 @@ keyword no doc_values: - match: {columns.0.name: card} - match: {columns.0.type: keyword} - length: {values: 1} - - match: {values.0.0: [diamonds, jack, of]} + - match: {values.0.0: [jack, of, diamonds]} --- wildcard: diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml index 33697a789cc26..6a90fc5a7b8f8 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/40_tsdb.yml @@ -111,7 +111,7 @@ load everything: - "No limit defined, adding default limit of 
\\[.*\\]" esql.query: body: - query: 'from test' + query: 'from test [metadata _id]' - match: {columns.0.name: "@timestamp"} - match: {columns.0.type: "date"} diff --git a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/90_non_indexed.yml b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/90_non_indexed.yml index a673fb7a5b88d..9138a9454c571 100644 --- a/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/90_non_indexed.yml +++ b/x-pack/plugin/esql/qa/server/single-node/src/yamlRestTest/resources/rest-api-spec/test/90_non_indexed.yml @@ -1,6 +1,6 @@ setup: - skip: - features: allowed_warnings_regex + features: allowed_warnings - do: indices.create: index: test @@ -85,11 +85,11 @@ setup: } --- -unsupported: +fetch: - do: - allowed_warnings_regex: - - "Field \\[.*\\] cannot be retrieved, it is unsupported or not indexed; returning null" - - "No limit defined, adding default limit of \\[.*\\]" + allowed_warnings: + - "Field [ip_noidx] cannot be retrieved, it is unsupported or not indexed; returning null" + - "No limit defined, adding default limit of [500]" esql.query: body: query: 'from test' @@ -130,18 +130,18 @@ unsupported: - length: { values: 1 } - match: { values.0.0: true } - - match: { values.0.1: null } + - match: { values.0.1: true } - match: { values.0.2: "2021-04-28T18:50:04.467Z" } - - match: { values.0.3: null } + - match: { values.0.3: "2021-04-28T18:50:04.467Z" } - match: { values.0.4: 40 } - - match: { values.0.5: null } + - match: { values.0.5: 40 } - match: { values.0.6: 30 } - - match: { values.0.7: null } + - match: { values.0.7: 30 } - match: { values.0.8: 10 } - - match: { values.0.9: null } + - match: { values.0.9: 10 } - match: { values.0.10: "192.168.0.1" } - match: { values.0.11: null } - match: { values.0.12: "foo" } - - match: { values.0.13: "foo" } # this is a special case, ESQL can retrieve keywords from source + - match: { values.0.13: "foo" } - match: { values.0.14: 20 } - - match: { values.0.15: null } + - match: { values.0.15: 20 } diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java new file mode 100644 index 0000000000000..f409fc6e69dee --- /dev/null +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEnrichTestCase.java @@ -0,0 +1,173 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.qa.rest; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.After; +import org.junit.Before; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.test.MapMatcher.assertMap; +import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase.runEsql; +import static org.hamcrest.Matchers.containsString; + +public class RestEnrichTestCase extends ESRestTestCase { + + private static final String sourceIndexName = "countries"; + private static final String testIndexName = "test"; + private static final String policyName = "countries"; + + @Before + @After + public void assertRequestBreakerEmpty() throws Exception { + EsqlSpecTestCase.assertRequestBreakerEmpty(); + } + + @Before + public void loadTestData() throws IOException { + Request request = new Request("PUT", "/" + testIndexName); + request.setJsonEntity(""" + { + "mappings": { + "properties": { + "geo.dest": { + "type": "keyword" + }, + "number": { + "type": "long" + } + } + } + }"""); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("POST", "/" + testIndexName + "/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(""" + { "index": {"_id": 1} } + { "geo.dest": "US", "number": 1000 } + { "index": {"_id": 2} } + { "geo.dest": "US", "number": 1000 } + { "index": {"_id": 3} } + { "geo.dest": "CN", "number": 5000 } + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("PUT", "/" + sourceIndexName); + request.setJsonEntity(""" + { + "mappings": { + "properties": { + "geo.dest": { + "type": "keyword" + }, + "country_name": { + "type": "keyword" + } + } + } + }"""); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("POST", "/" + sourceIndexName + "/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(""" + { "index" : {}} + { "geo.dest": "US", "country_name": "United States of America" } + { "index" : {}} + { "geo.dest": "IN", "country_name": "India" } + { "index" : {}} + { "geo.dest": "CN", "country_name": "China" } + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("PUT", "/_enrich/policy/" + policyName); + request.setJsonEntity(""" + { + "match": { + "indices": "countries", + "match_field": "geo.dest", + "enrich_fields": ["country_name"] + } + } + """); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + + request = new Request("PUT", "/_enrich/policy/" + policyName + "/_execute"); + assertEquals(200, client().performRequest(request).getStatusLine().getStatusCode()); + } + + @After + public void wipeTestData() throws IOException { + try { + var response = client().performRequest(new Request("DELETE", "/" + testIndexName)); + assertEquals(200, response.getStatusLine().getStatusCode()); + response = client().performRequest(new Request("DELETE", "/" + sourceIndexName)); + assertEquals(200, response.getStatusLine().getStatusCode()); + response = client().performRequest(new Request("DELETE", "/_enrich/policy/" + policyName)); + assertEquals(200, response.getStatusLine().getStatusCode()); 
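        // Reviewer note, not part of the original diff: the three deletes above tear down
        // the test index, the enrich source index, and the enrich policy in that order; the
        // catch below accepts a 404 so a missing index or policy does not fail the teardown.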
+ } catch (ResponseException re) { + assertEquals(404, re.getResponse().getStatusLine().getStatusCode()); + } + } + + public void testNonExistentEnrichPolicy() throws IOException { + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris").build()) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()), + containsString("unresolved enrich policy [countris], did you mean [countries]?") + ); + } + + public void testNonExistentEnrichPolicy_KeepField() throws IOException { + ResponseException re = expectThrows( + ResponseException.class, + () -> runEsql(new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countris | keep number").build()) + ); + assertThat( + EntityUtils.toString(re.getResponse().getEntity()), + containsString("unresolved enrich policy [countris], did you mean [countries]?") + ); + } + + public void testMatchField_ImplicitFieldsList() throws IOException { + Map result = runEsql( + new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | keep number").build() + ); + var columns = List.of(Map.of("name", "number", "type", "long")); + var values = List.of(List.of(1000), List.of(1000), List.of(5000)); + + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + + public void testMatchField_ImplicitFieldsList_WithStats() throws IOException { + Map result = runEsql( + new RestEsqlTestCase.RequestObjectBuilder().query("from test | enrich countries | stats s = sum(number) by country_name") + .build() + ); + var columns = List.of(Map.of("name", "s", "type", "long"), Map.of("name", "country_name", "type", "keyword")); + var values = List.of(List.of(2000, "United States of America"), List.of(5000, "China")); + + assertMap(result, matchesMap().entry("columns", columns).entry("values", values)); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } +} diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java index 4d9a5a259ed03..3693f0b0c2bb9 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/RestEsqlTestCase.java @@ -242,13 +242,12 @@ public void testCSVNoHeaderMode() throws IOException { assertEquals("keyword0,0\r\n", actual); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/98719") public void testWarningHeadersOnFailedConversions() throws IOException { int count = randomFrom(10, 40, 60); bulkLoadTestData(count); Request request = prepareRequest(); - var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword))"; + var query = fromIndex() + " | eval asInt = to_int(case(integer % 2 == 0, to_str(integer), keyword)) | limit 1000"; var mediaType = attachBody(new RequestObjectBuilder().query(query).build(), request); RequestOptions.Builder options = request.getOptions().toBuilder(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-ignoreCsvTests.csv-spec rename to 
x-pack/plugin/esql/qa/testFixtures/src/main/resources/blog-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec index 54bc481c54b48..8091f7a18463e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/dissect.csv-spec @@ -15,17 +15,13 @@ foo bar | null | null complexPattern -// tag::dissect[] ROW a = "1953-01-23T12:15:00Z - some text - 127.0.0.1;" | DISSECT a "%{Y}-%{M}-%{D}T%{h}:%{m}:%{s}Z - %{msg} - %{ip};" | KEEP Y, M, D, h, m, s, msg, ip -// end::dissect[] ; -// tag::dissect-result[] Y:keyword | M:keyword | D:keyword | h:keyword | m:keyword | s:keyword | msg:keyword | ip:keyword 1953 | 01 | 23 | 12 | 15 | 00 | some text | 127.0.0.1 -// end::dissect-result[] ; @@ -52,6 +48,13 @@ a:keyword | b:keyword | c:keyword | d:keyword foo 1 bar 2 baz | foo,bar,baz | 1 | 2 ; +appendSeparatorUppercase +row a = "foo 1 bar 2 baz" | dissect a "%{+b} %{c} %{+b} %{d} %{+b}" APPEND_SEPARATOR=","; + +a:keyword | b:keyword | c:keyword | d:keyword +foo 1 bar 2 baz | foo,bar,baz | 1 | 2 +; + namedSkip row a = "foo bar baz" | dissect a "%{b} %{?c} %{d}"; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-ignoreCsvTests.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 6a4fce52fc8a7..02d530a2ae835 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -468,4 +468,90 @@ count:long | languages:integer 19 |2 15 |1 // end::countAll-result[] +; + +basicGrok +// tag::basicGrok[] +ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" +| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num}" +| KEEP date, ip, email, num +// end::basicGrok[] +; + +// tag::basicGrok-result[] +date:keyword | ip:keyword | email:keyword | num:keyword +2023-01-23T12:15:00.000Z | 127.0.0.1 | some.email@foo.com | 42 +// end::basicGrok-result[] +; + +grokWithConversionSuffix +// tag::grokWithConversionSuffix[] +ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" +| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}" +| KEEP date, ip, email, num +// end::grokWithConversionSuffix[] +; + +// tag::grokWithConversionSuffix-result[] +date:keyword | ip:keyword | email:keyword | num:integer +2023-01-23T12:15:00.000Z | 127.0.0.1 | some.email@foo.com | 42 +// end::grokWithConversionSuffix-result[] +; + +grokWithToDatetime +// tag::grokWithToDatetime[] +ROW a = "2023-01-23T12:15:00.000Z 127.0.0.1 some.email@foo.com 42" +| GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}" +| KEEP date, ip, email, num +| EVAL date = TO_DATETIME(date) +// end::grokWithToDatetime[] +; + +// tag::grokWithToDatetime-result[] +ip:keyword | email:keyword | num:integer | date:date +127.0.0.1 | some.email@foo.com | 42 | 2023-01-23T12:15:00.000Z +// end::grokWithToDatetime-result[] +; + +grokWithEscape +// tag::grokWithEscape[] 
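// Reviewer note, not in the original spec file: the doubled backslashes in the GROK
// pattern below are ESQL string-literal escaping, so the pattern GROK actually sees is
// \[ ... \]; escaping the brackets makes them match literal characters instead of
// opening a regex character class.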
+ROW a = "1.2.3.4 [2023-01-23T12:15:00.000Z] Connected" +| GROK a "%{IP:ip} \\[%{TIMESTAMP_ISO8601:@timestamp}\\] %{GREEDYDATA:status}" +// end::grokWithEscape[] +| KEEP @timestamp +; + +// tag::grokWithEscape-result[] +@timestamp:keyword +2023-01-23T12:15:00.000Z +// end::grokWithEscape-result[] +; + +basicDissect +// tag::basicDissect[] +ROW a = "2023-01-23T12:15:00.000Z - some text - 127.0.0.1" +| DISSECT a "%{date} - %{msg} - %{ip}" +| KEEP date, msg, ip +// end::basicDissect[] +; + +// tag::basicDissect-result[] +date:keyword | msg:keyword | ip:keyword +2023-01-23T12:15:00.000Z | some text | 127.0.0.1 +// end::basicDissect-result[] +; + +dissectWithToDatetime +// tag::dissectWithToDatetime[] +ROW a = "2023-01-23T12:15:00.000Z - some text - 127.0.0.1" +| DISSECT a "%{date} - %{msg} - %{ip}" +| KEEP date, msg, ip +| EVAL date = TO_DATETIME(date) +// end::dissectWithToDatetime[] +; + +// tag::dissectWithToDatetime-result[] +msg:keyword | ip:keyword | date:date +some text | 127.0.0.1 | 2023-01-23T12:15:00.000Z +// end::dissectWithToDatetime-result[] ; \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-ignoreCsvTests.csv-spec rename to x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index a22a0e6529df7..b29c8024950f9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -206,3 +206,12 @@ row a = [1.2], b = [2.4, 7.9] | eval c = round(a), d = round(b), e = round([1.2] a:double | b:double | c:double | d: double | e:double | f:double | g:double | h:double 1.2 | [2.4, 7.9] | 1.0 | null | 1.0 | null | 1.1 | null ; + + +evalSplitFoldable +from employees | sort emp_no | eval foldable = "foo,bar" | eval folded_mv = split(foldable, ",") | keep emp_no, foldable, folded_mv | limit 2; + +emp_no:integer | foldable:keyword | folded_mv:keyword +10001 | "foo,bar" | [foo, bar] +10002 | "foo,bar" | [foo, bar] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec index 9dc9444de0155..f71f51d42c45f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/grok.csv-spec @@ -15,17 +15,12 @@ foo bar | null complexPattern -// tag::grok[] ROW a = "1953-01-23T12:15:00Z 127.0.0.1 some.email@foo.com 42" | GROK a "%{TIMESTAMP_ISO8601:date} %{IP:ip} %{EMAILADDRESS:email} %{NUMBER:num:int}" -| KEEP date, ip, email, num -// end::grok[] -; +| KEEP date, ip, email, num; -// tag::grok-result[] date:keyword | ip:keyword | email:keyword | num:integer 1953-01-23T12:15:00Z | 127.0.0.1 | some.email@foo.com | 42 -// end::grok-result[] ; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-IT_tests_only.csv-spec similarity index 100% rename from x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-ignoreCsvTests.csv-spec rename to 
x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata-IT_tests_only.csv-spec diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index ae27e8f56f9f7..7553cea0e26d5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -98,3 +98,157 @@ sum_a:long | b:integer 12555000 | 29 12555000 | 30 ; + +expandAfterSort1 +from employees | keep job_positions, emp_no | sort emp_no | mv_expand job_positions | limit 10 | sort job_positions; + + job_positions:keyword |emp_no:integer +Accountant |10001 +Head Human Resources |10004 +Principal Support Engineer|10006 +Reporting Analyst |10004 +Senior Python Developer |10001 +Senior Team Lead |10002 +Support Engineer |10004 +Tech Lead |10004 +null |10005 +null |10003 +; + +expandAfterSort2 +from employees | sort emp_no | mv_expand job_positions | keep job_positions, emp_no | limit 5; + + job_positions:keyword |emp_no:integer +Accountant |10001 +Senior Python Developer|10001 +Senior Team Lead |10002 +null |10003 +Head Human Resources |10004 +; + +expandWithMultiSort +from employees | keep emp_no, job_positions | sort emp_no | mv_expand job_positions | limit 10 | where emp_no <= 10006 | sort job_positions nulls first; + +emp_no:integer | job_positions:keyword +10003 |null +10005 |null +10001 |Accountant +10004 |Head Human Resources +10006 |Principal Support Engineer +10004 |Reporting Analyst +10001 |Senior Python Developer +10002 |Senior Team Lead +10004 |Support Engineer +10004 |Tech Lead +; + +filterMvExpanded +from employees | keep emp_no, job_positions | mv_expand job_positions | where job_positions like "A*" | sort job_positions, emp_no; + +emp_no:integer | job_positions:keyword +10001 |Accountant +10012 |Accountant +10016 |Accountant +10023 |Accountant +10025 |Accountant +10028 |Accountant +10034 |Accountant +10037 |Accountant +10044 |Accountant +10045 |Accountant +10050 |Accountant +10051 |Accountant +10066 |Accountant +10081 |Accountant +10085 |Accountant +10089 |Accountant +10092 |Accountant +10094 |Accountant +10010 |Architect +10011 |Architect +10031 |Architect +10032 |Architect +10042 |Architect +10047 |Architect +10059 |Architect +10068 |Architect +10072 |Architect +10076 |Architect +10078 |Architect +10096 |Architect +10098 |Architect +; + +doubleSort_OnDifferentThan_MvExpandedFields +from employees | sort emp_no | mv_expand job_positions | keep emp_no, job_positions, salary | sort salary, job_positions | limit 5; + +emp_no:integer | job_positions:keyword |salary:integer +10015 |Head Human Resources |25324 +10015 |Junior Developer |25324 +10015 |Principal Support Engineer|25324 +10015 |Support Engineer |25324 +10035 |Data Scientist |25945 +; + +doubleLimit_expandLimitLowerThanAvailable +from employees | where emp_no == 10004 | limit 1 | keep emp_no, job_positions | mv_expand job_positions | limit 2; + +emp_no:integer | job_positions:keyword +10004 |Head Human Resources +10004 |Reporting Analyst +; + +doubleLimit_expandLimitGreaterThanAvailable +from employees | where emp_no == 10004 | limit 1 | keep emp_no, job_positions | mv_expand job_positions | limit 5; + +emp_no:integer | job_positions:keyword +10004 |Head Human Resources +10004 |Reporting Analyst +10004 |Support Engineer +10004 |Tech Lead +; + +doubleLimitWithSort +from employees | where emp_no == 10004 | limit 1 | keep emp_no, job_positions | mv_expand job_positions | 
limit 5 | sort job_positions desc; + +emp_no:integer | job_positions:keyword +10004 |Tech Lead +10004 |Support Engineer +10004 |Reporting Analyst +10004 |Head Human Resources +; + +tripleLimit_WithWhere_InBetween_MvExpand_And_Limit +from employees | where emp_no == 10004 | limit 1 | keep emp_no, job_positions | mv_expand job_positions | where job_positions LIKE "*a*" | limit 2 | where job_positions LIKE "*a*" | limit 3; + +emp_no:integer | job_positions:keyword +10004 |Head Human Resources +10004 |Reporting Analyst +; + + +expandFoldable +row a = "foobar", b = ["foo", "bar"], c = 12 | mv_expand b | where b LIKE "fo*"; + +a:keyword | b:keyword | c:integer +foobar | foo | 12 +; + +expandEvalFoldable +from employees | sort emp_no | limit 2 | eval foldable = "foo,bar" | eval generate_mv = split(foldable,",") | mv_expand generate_mv | keep emp_no, first_name, generate_mv | sort emp_no asc, generate_mv desc; + +emp_no:integer | first_name:keyword | generate_mv:keyword +10001 | Georgi | foo +10001 | Georgi | bar +10002 | Bezalel | foo +10002 | Bezalel | bar +; + + +expandEvalFoldableWhere +from employees | sort emp_no | limit 2 | eval foldable = "foo,bar" | eval generate_mv = split(foldable,",") | mv_expand generate_mv | keep emp_no, first_name, generate_mv | where generate_mv LIKE "fo*"; + +emp_no:integer | first_name:keyword | generate_mv:keyword +10001 | Georgi | foo +10002 | Bezalel | foo +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 60c5fc94ba0d6..117bb9646bc5d 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -9,20 +9,20 @@ showFunctions show functions; name:keyword | synopsis:keyword | argNames:keyword | argTypes:keyword | argDescriptions:keyword |returnType:keyword | description:keyword | optionalArgs:boolean | variadic:boolean -abs |"? abs(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -acos |"? acos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -asin |"? asin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -atan |"? atan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -atan2 |"? atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" |[y, x] |["integer|long|double|unsigned_long", "integer|long|double|unsigned_long"] |["", ""] |? | "" | [false, false] | false -auto_bucket |? auto_bucket(arg1:?, arg2:?, arg3:?, arg4:?) |[arg1, arg2, arg3, arg4] |[?, ?, ?, ?] |["", "", "", ""] |? 
| "" | [false, false, false, false] | false +abs |"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |"integer|long|double|unsigned_long" | "" | false | false +acos |"double acos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false +asin |"double asin(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "" |double | "" | false | false +atan |"double atan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false +atan2 |"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" |[y, x] |["integer|long|double|unsigned_long", "integer|long|double|unsigned_long"] |["", ""] |double | "" | [false, false] | false +auto_bucket |"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" |[field, buckets, from, to] |["integer|long|double|date", "integer", "integer|long|double|date", "integer|long|double|date"] |["", "", "", ""] | "double|date" | "" | [false, false, false, false] | false avg |? avg(arg1:?) |arg1 |? | "" |? | "" | false | false case |? case(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true ceil |"? ceil(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false cidr_match |? cidr_match(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true coalesce |? coalesce(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true concat |? concat(arg1:?, arg2...:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | true -cos |"? cos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -cosh |"? cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false +cos |"double cos(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false +cosh |"double cosh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false count |? count(arg1:?) |arg1 |? | "" |? | "" | false | false count_distinct |? count_distinct(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false date_extract |? date_extract(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false @@ -36,22 +36,22 @@ greatest |"? greatest(first:integer|long|double|boolean|keyword| is_finite |? is_finite(arg1:?) |arg1 |? | "" |? | "" | false | false is_infinite |? is_infinite(arg1:?) |arg1 |? | "" |? | "" | false | false is_nan |? is_nan(arg1:?) |arg1 |? | "" |? | "" | false | false -least |"? least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true -left |"? left(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false +least |"? 
least(first:integer|long|double|boolean|keyword|text|ip|version, rest...:integer|long|double|boolean|keyword|text|ip|version)" |[first, rest] |["integer|long|double|boolean|keyword|text|ip|version", "integer|long|double|boolean|keyword|text|ip|version"] |["", ""] |? | "" | [false, false] | true +left |"? left(string:keyword, length:integer)" |[string, length] |["keyword", "integer"] |["", ""] |? | "" | [false, false] | false length |? length(arg1:?) |arg1 |? | "" |? | "" | false | false -log10 |"? log10(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false +log10 |"? log10(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false ltrim |? ltrim(arg1:?) |arg1 |? | "" |? | "" | false | false max |? max(arg1:?) |arg1 |? | "" |? | "" | false | false median |? median(arg1:?) |arg1 |? | "" |? | "" | false | false median_absolute_deviation|? median_absolute_deviation(arg1:?) |arg1 |? | "" |? | "" | false | false min |? min(arg1:?) |arg1 |? | "" |? | "" | false | false mv_avg |? mv_avg(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_concat |? mv_concat(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false -mv_count |? mv_count(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_dedupe |? mv_dedupe(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_max |? mv_max(arg1:?) |arg1 |? | "" |? | "" | false | false +mv_concat |"keyword mv_concat(v:text|keyword, delim:text|keyword)" |[v, delim] |["text|keyword", "text|keyword"] |["values to join", "delimiter"] |keyword | "Reduce a multivalued string field to a single valued field by concatenating all values." | [false, false] | false +mv_count |"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" | integer | "Reduce a multivalued field to a single valued field containing the count of values." | false | false +mv_dedupe |"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" |v | "boolean|date|double|ip|text|integer|keyword|version|long" | "" |? | "Remove duplicate values from a multivalued field." | false | false +mv_max |"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the maximum value." | false | false mv_median |? mv_median(arg1:?) |arg1 |? | "" |? | "" | false | false -mv_min |? mv_min(arg1:?) |arg1 |? | "" |? | "" | false | false +mv_min |"? mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" |v | "unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long" | "" |? | "Reduce a multivalued field to a single valued field containing the minimum value." | false | false mv_sum |? mv_sum(arg1:?) |arg1 |? | "" |? | "" | false | false now |? now() | null |null | null |? | "" | null | false percentile |? percentile(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false @@ -62,14 +62,14 @@ right |"? right(string:keyword, length:integer)" |[string round |? round(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false rtrim |? rtrim(arg1:?) |arg1 |? | "" |? 
| "" | false | false sin |"double sin(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" |An angle, in radians |double |Returns the trigonometric sine of an angle | false | false -sinh |"? sinh(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "" |? | "" | false | false +sinh |"double sinh(n:integer|long|double|unsigned_long)"|n |"integer|long|double|unsigned_long" | "" |double | "" | false | false split |? split(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false sqrt |"? sqrt(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false starts_with |? starts_with(arg1:?, arg2:?) |[arg1, arg2] |[?, ?] |["", ""] |? | "" | [false, false] | false substring |? substring(arg1:?, arg2:?, arg3:?) |[arg1, arg2, arg3] |[?, ?, ?] |["", "", ""] |? | "" | [false, false, false]| false sum |? sum(arg1:?) |arg1 |? | "" |? | "" | false | false -tan |"? tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false -tanh |"? tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |? | "" | false | false +tan |"double tan(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false +tanh |"double tanh(n:integer|long|double|unsigned_long)" |n |"integer|long|double|unsigned_long" | "" |double | "" | false | false tau |? tau() | null | null | null |? | "" | null | false to_bool |? to_bool(arg1:?) |arg1 |? | "" |? | "" | false | false to_boolean |? to_boolean(arg1:?) |arg1 |? | "" |? | "" | false | false @@ -98,20 +98,20 @@ showFunctionsSynopsis show functions | keep synopsis; synopsis:keyword -"? abs(n:integer|long|double|unsigned_long)" -"? acos(n:integer|long|double|unsigned_long)" -"? asin(n:integer|long|double|unsigned_long)" -"? atan(n:integer|long|double|unsigned_long)" -"? atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" -? auto_bucket(arg1:?, arg2:?, arg3:?, arg4:?) +"integer|long|double|unsigned_long abs(n:integer|long|double|unsigned_long)" +"double acos(n:integer|long|double|unsigned_long)" +"double asin(n:integer|long|double|unsigned_long)" +"double atan(n:integer|long|double|unsigned_long)" +"double atan2(y:integer|long|double|unsigned_long, x:integer|long|double|unsigned_long)" +"double|date auto_bucket(field:integer|long|double|date, buckets:integer, from:integer|long|double|date, to:integer|long|double|date)" ? avg(arg1:?) ? case(arg1:?, arg2...:?) "? ceil(n:integer|long|double|unsigned_long)" ? cidr_match(arg1:?, arg2...:?) ? coalesce(arg1:?, arg2...:?) ? concat(arg1:?, arg2...:?) -"? cos(n:integer|long|double|unsigned_long)" -"? cosh(n:integer|long|double|unsigned_long)" +"double cos(n:integer|long|double|unsigned_long)" +"double cosh(n:integer|long|double|unsigned_long)" ? count(arg1:?) ? count_distinct(arg1:?, arg2:?) ? date_extract(arg1:?, arg2:?) @@ -135,12 +135,12 @@ synopsis:keyword ? median_absolute_deviation(arg1:?) ? min(arg1:?) ? mv_avg(arg1:?) -? mv_concat(arg1:?, arg2:?) -? mv_count(arg1:?) -? mv_dedupe(arg1:?) -? mv_max(arg1:?) +"keyword mv_concat(v:text|keyword, delim:text|keyword)" +"integer mv_count(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" +"? mv_dedupe(v:boolean|date|double|ip|text|integer|keyword|version|long)" +"? mv_max(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" ? mv_median(arg1:?) -? mv_min(arg1:?) +"? 
mv_min(v:unsigned_long|date|boolean|double|ip|text|integer|keyword|version|long)" ? mv_sum(arg1:?) ? now() ? percentile(arg1:?, arg2:?) @@ -151,14 +151,14 @@ synopsis:keyword ? round(arg1:?, arg2:?) ? rtrim(arg1:?) "double sin(n:integer|long|double|unsigned_long)" -"? sinh(n:integer|long|double|unsigned_long)" +"double sinh(n:integer|long|double|unsigned_long)" ? split(arg1:?, arg2:?) "? sqrt(n:integer|long|double|unsigned_long)" ? starts_with(arg1:?, arg2:?) ? substring(arg1:?, arg2:?, arg3:?) ? sum(arg1:?) -"? tan(n:integer|long|double|unsigned_long)" -"? tanh(n:integer|long|double|unsigned_long)" +"double tan(n:integer|long|double|unsigned_long)" +"double tanh(n:integer|long|double|unsigned_long)" ? tau() ? to_bool(arg1:?) ? to_boolean(arg1:?) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index d671ba6ec13b1..acf42d908ed66 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -681,3 +681,19 @@ c:l | job_positions:s 4 |Reporting Analyst 4 |Tech Lead ; + +duplicateAggregationsWithoutGrouping +from employees | eval x = salary | stats c = count(), m = min(x), m1 = min(salary), c1 = count(1); + +c:l | m:i | m1:i | c1:l +100 | 25324 | 25324 | 100 +; + +duplicateAggregationsWithGrouping +from employees | eval x = salary | stats c = count(), m = min(x), m1 = min(salary), c1 = count(1) by gender | sort gender; + +c:l| m:i | m1:i | c1:l| gender:s +33 | 25976 | 25976 | 33 | F +57 | 25945 | 25945 | 57 | M +10 | 25324 | 25324 | 10 | null +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec index c513f6670b044..49bf62bf77db7 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/where-like.csv-spec @@ -273,3 +273,17 @@ emp_no:integer | first_name:keyword | last_name:keyword 10086 | Somnath | Foote 10088 | Jungsoon | Syrzycki ; + + +likeWithPath +row x = "C:\\foo\\bar.exe" | mv_expand x | where x LIKE "C:\\\\*"; + +x:keyword +C:\foo\bar.exe +; + +likeWithPathNoMatch +row x = "C:\\foo\\bar.exe" | mv_expand x | where x LIKE "C:\\\\\\\\*"; + +x:keyword +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java index 45e3ea5e3fa6e..c2b275105bdd7 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionIT.java @@ -23,6 +23,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ListMatcher; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.analysis.VerificationException; @@ -34,11 +35,13 @@ import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.OptionalDouble; +import java.util.Set; import 
java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; @@ -50,6 +53,8 @@ import static java.util.Comparator.comparing; import static java.util.Comparator.naturalOrder; import static java.util.Comparator.reverseOrder; +import static org.elasticsearch.test.ListMatcher.matchesList; +import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; import static org.hamcrest.Matchers.allOf; @@ -81,7 +86,6 @@ public void testProjectConstant() { assertThat(getValuesList(results).size(), equalTo(40)); assertThat(getValuesList(results).get(0).get(0), equalTo(1)); } - } public void testStatsOverConstant() { @@ -703,7 +707,6 @@ public void testRefreshSearchIdleShards() throws Exception { .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) .put("index.routing.rebalance.enable", "none") ) - .get() ); ensureYellow(indexName); AtomicLong totalValues = new AtomicLong(); @@ -766,7 +769,6 @@ public void testESFilter() throws Exception { .indices() .prepareCreate(indexName) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) - .get() ); ensureYellow(indexName); int numDocs = randomIntBetween(1, 5000); @@ -807,7 +809,6 @@ public void testExtractFields() throws Exception { .prepareCreate(indexName) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) .setMapping("val", "type=long", "tag", "type=keyword") - .get() ); int numDocs = randomIntBetween(1, 100); List<IndexRequestBuilder> indexRequests = new ArrayList<>(); @@ -896,7 +897,6 @@ public void testIndexPatterns() throws Exception { .prepareCreate(indexName) .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) .setMapping("data", "type=long", "count", "type=long") - .get() ); ensureYellow(indexName); client().prepareBulk() @@ -957,7 +957,6 @@ public void testOverlappingIndexPatterns() throws Exception { .prepareCreate("test_overlapping_index_patterns_1") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) .setMapping("field", "type=long") - .get() ); ensureYellow("test_overlapping_index_patterns_1"); client().prepareBulk() @@ -971,7 +970,6 @@ public void testOverlappingIndexPatterns() throws Exception { .prepareCreate("test_overlapping_index_patterns_2") .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, between(1, 5))) .setMapping("field", "type=keyword") - .get() ); ensureYellow("test_overlapping_index_patterns_2"); client().prepareBulk() @@ -1186,6 +1184,17 @@ public void testGroupingMultiValueByOrdinals() { } } + public void testLoadId() { + try (EsqlQueryResponse results = run("from test [metadata _id] | keep _id | sort _id ")) { + assertThat(results.columns(), equalTo(List.of(new ColumnInfo("_id", "keyword")))); + ListMatcher values = matchesList(); + for (int i = 10; i < 50; i++) { + values = values.item(List.of(Integer.toString(i))); + } + assertMap(getValuesList(results), values); + } + } + public void testUnsupportedTypesOrdinalGrouping() { assertAcked( client().admin().indices().prepareCreate("index-1").setMapping("f1", "type=keyword", "f2", "type=keyword", "v", "type=long") @@ -1263,6 +1272,65 @@ public void testStatsNestFields() { } } + public void testStatsMissingFields() { + String node1 = internalCluster().startDataOnlyNode(); + String node2 =
internalCluster().startDataOnlyNode(); + assertAcked( + client().admin() + .indices() + .prepareCreate("foo-index") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node1)) + .setMapping("foo_int", "type=integer", "foo_long", "type=long", "foo_float", "type=float", "foo_double", "type=double") + ); + assertAcked( + client().admin() + .indices() + .prepareCreate("bar-index") + .setSettings(Settings.builder().put("index.routing.allocation.require._name", node2)) + .setMapping("bar_int", "type=integer", "bar_long", "type=long", "bar_float", "type=float", "bar_double", "type=double") + ); + + var fields = List.of("foo_int", "foo_long", "foo_float", "foo_double"); + var functions = List.of("sum", "count", "avg", "count_distinct"); + for (String field : fields) { + for (String function : functions) { + String stat = String.format(Locale.ROOT, "stats s = %s(%s)", function, field); + String command = String.format(Locale.ROOT, "from foo-index,bar-index | where %s is not null | %s", field, stat); + try (var resp = run(command)) { + var valuesList = getValuesList(resp); + assertEquals(1, resp.columns().size()); + assertEquals(1, valuesList.size()); + } + } + } + } + + public void testCountTextField() { + assertAcked(client().admin().indices().prepareCreate("test_count").setMapping("name", "type=text")); + int numDocs = between(10, 1000); + Set<String> names = new HashSet<>(); + for (int i = 0; i < numDocs; i++) { + String name = "name-" + randomIntBetween(1, 100); + names.add(name); + IndexRequestBuilder indexRequest = client().prepareIndex("test_count").setSource("name", name); + if (randomInt(100) < 5) { + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } + indexRequest.get(); + } + client().admin().indices().prepareRefresh("test_count").get(); + try (EsqlQueryResponse resp = run("FROM test_count | stats COUNT_DISTINCT(name)")) { + Iterator<Object> row = resp.values().next(); + assertThat(row.next(), equalTo((long) names.size())); + assertFalse(row.hasNext()); + } + try (EsqlQueryResponse resp = run("FROM test_count | stats COUNT(name)")) { + Iterator<Object> row = resp.values().next(); + assertThat(row.next(), equalTo((long) numDocs)); + assertFalse(row.hasNext()); + } + } + private void createNestedMappingIndex(String indexName) throws IOException { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); @@ -1295,7 +1363,6 @@ private void createNestedMappingIndex(String indexName) throws IOException { .prepareCreate(indexName) .setSettings(Settings.builder().put("index.number_of_shards", ESTestCase.randomIntBetween(1, 3))) .setMapping(builder) - .get() ); } @@ -1372,7 +1439,6 @@ private void createAndPopulateIndex(String indexName, Settings additionalSetting "color", "type=keyword" ) - .get() ); long timestamp = epoch; for (int i = 0; i < 10; i++) { diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionRuntimeFieldIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionRuntimeFieldIT.java index 41450be131e2a..be661b51d41d5 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionRuntimeFieldIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionRuntimeFieldIT.java @@ -38,8 +38,7 @@ import static org.hamcrest.Matchers.equalTo; /** - * Makes sure that the circuit breaker is "plugged in" to ESQL by configuring an - * unreasonably small breaker
and tripping it. + * Tests runtime fields against ESQL. */ @ESIntegTestCase.ClusterScope(scope = SUITE, numDataNodes = 1, numClientNodes = 0, supportsDedicatedMasters = false) // @TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index b1fab0ab94af9..edaf9d91e9771 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -177,7 +177,7 @@ public void testTaskContents() throws Exception { } if (o.operator().equals("ValuesSourceReaderOperator[field = pause_me]")) { ValuesSourceReaderOperator.Status oStatus = (ValuesSourceReaderOperator.Status) o.status(); - assertMap(oStatus.readersBuilt(), matchesMap().entry("LongValuesReader", greaterThanOrEqualTo(1))); + assertMap(oStatus.readersBuilt(), matchesMap().entry("ScriptLongs", greaterThanOrEqualTo(1))); assertThat(oStatus.pagesProcessed(), greaterThanOrEqualTo(1)); valuesSourceReaders++; continue; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SyntheticSourceIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SyntheticSourceIT.java index f0365ce78f44a..2585b5325df18 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SyntheticSourceIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/SyntheticSourceIT.java @@ -9,11 +9,13 @@ import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; @@ -30,40 +32,64 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { } public void testMatchOnlyText() throws Exception { - XContentBuilder mapping = JsonXContent.contentBuilder(); - mapping.startObject(); - if (true || randomBoolean()) { - mapping.startObject("_source"); - mapping.field("mode", "synthetic"); - mapping.endObject(); + createIndex(b -> b.field("type", "match_only_text")); + + int numDocs = between(10, 1000); + for (int i = 0; i < numDocs; i++) { + IndexRequestBuilder indexRequest = client().prepareIndex("test").setSource("id", "i" + i, "field", "n" + i); + if (randomInt(100) < 5) { + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + } + indexRequest.get(); } - { - mapping.startObject("properties"); - mapping.startObject("uid"); - mapping.field("type", "keyword"); - mapping.endObject(); - mapping.startObject("name"); - mapping.field("type", "match_only_text"); - mapping.endObject(); - mapping.endObject(); + client().admin().indices().prepareRefresh("test").get(); + + try (EsqlQueryResponse resp = run("from test | sort id asc | limit 1")) { + Iterator<Object> row = resp.values().next(); + assertThat(row.next(), equalTo("n0")); + assertThat(row.next(), equalTo("i0")); + assertFalse(row.hasNext()); } - mapping.endObject(); + } - 
assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); + public void testText() throws Exception { + createIndex(b -> b.field("type", "text").field("store", true)); int numDocs = between(10, 1000); for (int i = 0; i < numDocs; i++) { - IndexRequestBuilder indexRequest = client().prepareIndex("test").setSource("uid", "u" + i); + IndexRequestBuilder indexRequest = client().prepareIndex("test").setSource("id", "i" + i, "field", "n" + i); if (randomInt(100) < 5) { indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); } indexRequest.get(); } client().admin().indices().prepareRefresh("test").get(); - try (EsqlQueryResponse resp = run("from test | keep uid, name | sort uid asc | limit 1")) { + try (EsqlQueryResponse resp = run("from test | keep field, id | sort id asc | limit 1")) { Iterator<Object> row = resp.values().next(); - assertThat(row.next(), equalTo("u0")); - assertNull(row.next()); + assertThat(row.next(), equalTo("n0")); + assertThat(row.next(), equalTo("i0")); + assertFalse(row.hasNext()); + } + } + + private void createIndex(CheckedFunction<XContentBuilder, XContentBuilder, IOException> fieldMapping) throws IOException { + XContentBuilder mapping = JsonXContent.contentBuilder(); + mapping.startObject(); + { + mapping.startObject("_source"); + mapping.field("mode", "synthetic"); + mapping.endObject(); + } + { + mapping.startObject("properties"); + mapping.startObject("id").field("type", "keyword").endObject(); + mapping.startObject("field"); + fieldMapping.apply(mapping); + mapping.endObject(); + mapping.endObject(); } + mapping.endObject(); + + assertAcked(client().admin().indices().prepareCreate("test").setMapping(mapping)); } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java index 57ef850c8b224..66527e8aa3ab6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsBoolsEvaluator.java @@ -35,14 +35,8 @@ public EqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BooleanBlock lhsBlock = (BooleanBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BooleanBlock rhsBlock = (BooleanBlock) rhsRef.block(); BooleanVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public EqualsBoolsEvaluator get(DriverContext context) { + return new
EqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "EqualsBoolsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java index 323068466c185..f63d0bdb95dae 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsDoublesEvaluator.java @@ -37,14 +37,8 @@ public EqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public EqualsDoublesEvaluator get(DriverContext context) { + return new EqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "EqualsDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java index 8d08b1a1cc8a9..ab8fbcb20879b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsIntsEvaluator.java @@ -37,14 +37,8 @@ public EqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = 
lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public EqualsIntsEvaluator get(DriverContext context) { + return new EqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "EqualsIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java index 8b898ebde641b..7368f3ff23213 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsKeywordsEvaluator.java @@ -38,14 +38,8 @@ public EqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public EqualsKeywordsEvaluator get(DriverContext context) { + return new EqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "EqualsKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java index 5a48f6e7b4efd..85bf4296e63a8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/EqualsLongsEvaluator.java @@ -37,14 +37,8 
@@ public EqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public EqualsLongsEvaluator get(DriverContext context) { + return new EqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "EqualsLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java index da69eb3363912..2b69d9b41dde9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanDoublesEvaluator.java @@ -37,14 +37,8 @@ public GreaterThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanDoublesEvaluator get(DriverContext context) { + return new GreaterThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + 
rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java index e1d45cbd10b81..9912428489fd1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanIntsEvaluator.java @@ -37,14 +37,8 @@ public GreaterThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanIntsEvaluator get(DriverContext context) { + return new GreaterThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java index 855b7aa8ef594..3139138565526 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanKeywordsEvaluator.java @@ -38,14 +38,8 @@ public GreaterThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void 
close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanKeywordsEvaluator get(DriverContext context) { + return new GreaterThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java index 32c8f661a0088..281c62932d4d4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanLongsEvaluator.java @@ -37,14 +37,8 @@ public GreaterThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanLongsEvaluator get(DriverContext context) { + return new GreaterThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java index c46d148395d11..700b2f8371aff 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualDoublesEvaluator.java @@ -37,14 +37,8 @@ public 
GreaterThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanOrEqualDoublesEvaluator get(DriverContext context) { + return new GreaterThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanOrEqualDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java index 684fd59912ee4..894c7b743d043 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualIntsEvaluator.java @@ -37,14 +37,8 @@ public GreaterThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanOrEqualIntsEvaluator get(DriverContext context) { + return new GreaterThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + 
return "GreaterThanOrEqualIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java index 79a4d1aa14870..da52726494428 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualKeywordsEvaluator.java @@ -38,14 +38,8 @@ public GreaterThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanOrEqualKeywordsEvaluator get(DriverContext context) { + return new GreaterThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanOrEqualKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java index 8adb0e553e800..aecf1e92fb6f0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/GreaterThanOrEqualLongsEvaluator.java @@ -37,14 +37,8 @@ public GreaterThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); 
- } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public GreaterThanOrEqualLongsEvaluator get(DriverContext context) { + return new GreaterThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "GreaterThanOrEqualLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java index 7d2ef9d99aa07..4c0ea5e5b0ea7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanDoublesEvaluator.java @@ -37,14 +37,8 @@ public LessThanDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanDoublesEvaluator get(DriverContext context) { + return new LessThanDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java index cd6b6e25d0adf..7fbb2fc4ecf05 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanIntsEvaluator.java @@ -37,14 +37,8 @@ public LessThanIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanIntsEvaluator get(DriverContext context) { + return new LessThanIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java index cbd94fdff111a..774571f139076 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanKeywordsEvaluator.java @@ -38,14 +38,8 @@ public LessThanKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanKeywordsEvaluator get(DriverContext context) { + return new 
LessThanKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java index 15c79206d8a45..261240496bb51 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanLongsEvaluator.java @@ -37,14 +37,8 @@ public LessThanLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanLongsEvaluator get(DriverContext context) { + return new LessThanLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java index 2a0b34b7e9739..51295be798fd9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualDoublesEvaluator.java @@ -37,14 +37,8 @@ public LessThanOrEqualDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } 
DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanOrEqualDoublesEvaluator get(DriverContext context) { + return new LessThanOrEqualDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanOrEqualDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java index f139a38429aa9..c4f29438a6057 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualIntsEvaluator.java @@ -37,14 +37,8 @@ public LessThanOrEqualIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanOrEqualIntsEvaluator get(DriverContext context) { + return new LessThanOrEqualIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanOrEqualIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java index 5610ce7055688..7e46ceba0152a 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualKeywordsEvaluator.java @@ -38,14 +38,8 @@ public LessThanOrEqualKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanOrEqualKeywordsEvaluator get(DriverContext context) { + return new LessThanOrEqualKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanOrEqualKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java index f2caf7477ab73..3a8187b345ead 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/LessThanOrEqualLongsEvaluator.java @@ -37,14 +37,8 @@ public LessThanOrEqualLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public 
Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public LessThanOrEqualLongsEvaluator get(DriverContext context) { + return new LessThanOrEqualLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "LessThanOrEqualLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java index 57e91a6cf6962..aea1620c63820 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsBoolsEvaluator.java @@ -35,14 +35,8 @@ public NotEqualsBoolsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BooleanBlock lhsBlock = (BooleanBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BooleanBlock rhsBlock = (BooleanBlock) rhsRef.block(); BooleanVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public NotEqualsBoolsEvaluator get(DriverContext context) { + return new NotEqualsBoolsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "NotEqualsBoolsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java index 1304deb2b2e3a..d340c88223eec 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsDoublesEvaluator.java @@ -37,14 +37,8 @@ public NotEqualsDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock 
lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public NotEqualsDoublesEvaluator get(DriverContext context) { + return new NotEqualsDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "NotEqualsDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java index 80d34e7312753..7492140ea6e61 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsIntsEvaluator.java @@ -37,14 +37,8 @@ public NotEqualsIntsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public NotEqualsIntsEvaluator get(DriverContext context) { + return new NotEqualsIntsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "NotEqualsIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java index 
8b5481e86a7d0..cac586f498e20 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsKeywordsEvaluator.java @@ -38,14 +38,8 @@ public NotEqualsKeywordsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock lhsBlock = (BytesRefBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock rhsBlock = (BytesRefBlock) rhsRef.block(); BytesRefVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -99,4 +93,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public NotEqualsKeywordsEvaluator get(DriverContext context) { + return new NotEqualsKeywordsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "NotEqualsKeywordsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java index 77073f3e16140..bad6c3cd2c552 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/NotEqualsLongsEvaluator.java @@ -37,14 +37,8 @@ public NotEqualsLongsEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + 
EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public NotEqualsLongsEvaluator get(DriverContext context) { + return new NotEqualsLongsEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "NotEqualsLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java index 74ca67b12c792..2a3fe4053cf3e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/logical/NotEvaluator.java @@ -31,9 +31,6 @@ public NotEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext driverCont @Override public Block.Ref eval(Page page) { try (Block.Ref vRef = v.eval(page)) { - if (vRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BooleanBlock vBlock = (BooleanBlock) vRef.block(); BooleanVector vVector = vBlock.asVector(); if (vVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(v); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory v; + + public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + this.v = v; + } + + @Override + public NotEvaluator get(DriverContext context) { + return new NotEvaluator(v.get(context), context); + } + + @Override + public String toString() { + return "NotEvaluator[" + "v=" + v + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java index 652eb00996ba5..89be94c494550 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/evaluator/predicate/operator/regex/RegexMatchEvaluator.java @@ -39,9 +39,6 @@ public RegexMatchEvaluator(EvalOperator.ExpressionEvaluator input, CharacterRunA @Override public Block.Ref eval(Page page) { try (Block.Ref inputRef = input.eval(page)) { - if (inputRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock inputBlock = (BytesRefBlock) inputRef.block(); BytesRefVector inputVector = inputBlock.asVector(); if (inputVector == null) { @@ -84,4 +81,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(input); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory input; + + private final CharacterRunAutomaton pattern; + + public Factory(EvalOperator.ExpressionEvaluator.Factory input, CharacterRunAutomaton pattern) { + this.input = input; + this.pattern = pattern; + } + + @Override + public RegexMatchEvaluator 
get(DriverContext context) { + return new RegexMatchEvaluator(input.get(context), pattern, context); + } + + @Override + public String toString() { + return "RegexMatchEvaluator[" + "input=" + input + ", pattern=" + pattern + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java index ded2ae6b176f7..7a46806c829dd 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBooleanEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (BooleanBlock) block; + valuesBlocks[i] = (BooleanBlock) valuesRefs[i].block(); } BooleanVector[] valuesVectors = new BooleanVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public GreatestBooleanEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new GreatestBooleanEvaluator(values, context); + } + + @Override + public String toString() { + return "GreatestBooleanEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java index 74b5dbef66b6d..c612f5573efc6 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestBytesRefEvaluator.java @@ -39,11 +39,7 @@ public Block.Ref eval(Page page) { BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (BytesRefBlock) block; + valuesBlocks[i] = (BytesRefBlock) valuesRefs[i].block(); } BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -108,4 +104,23 @@ 
public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public GreatestBytesRefEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new GreatestBytesRefEvaluator(values, context); + } + + @Override + public String toString() { + return "GreatestBytesRefEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java index 454b72246fa5a..0963543e58030 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestDoubleEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { DoubleBlock[] valuesBlocks = new DoubleBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (DoubleBlock) block; + valuesBlocks[i] = (DoubleBlock) valuesRefs[i].block(); } DoubleVector[] valuesVectors = new DoubleVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public GreatestDoubleEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new GreatestDoubleEvaluator(values, context); + } + + @Override + public String toString() { + return "GreatestDoubleEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java index 605a6592f9e8d..15e2205ad9a97 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestIntEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { IntBlock[] valuesBlocks = new IntBlock[values.length]; for (int 
i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (IntBlock) block; + valuesBlocks[i] = (IntBlock) valuesRefs[i].block(); } IntVector[] valuesVectors = new IntVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public GreatestIntEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new GreatestIntEvaluator(values, context); + } + + @Override + public String toString() { + return "GreatestIntEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java index e03d4cf1101d9..4f123e90edc07 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/GreatestLongEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { LongBlock[] valuesBlocks = new LongBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (LongBlock) block; + valuesBlocks[i] = (LongBlock) valuesRefs[i].block(); } LongVector[] valuesVectors = new LongVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public GreatestLongEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new GreatestLongEvaluator(values, context); + } + + @Override + public String toString() { + return "GreatestLongEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java 
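// ---------------------------------------------------------------------------
// Reviewer sketch, not part of the generated patch: every evaluator touched by
// this diff gains a nested Factory implementing
// EvalOperator.ExpressionEvaluator.Factory. The factory tree captures only
// other factories plus immutable state and defers construction to
// get(DriverContext), so each driver builds its own evaluator instances bound
// to its own context. The stand-in types below (DriverContext,
// ExpressionEvaluator, Field, the double[]-row eval signature) are assumptions
// for illustration only, not the real Elasticsearch classes.
final class EvaluatorFactorySketch {
    record DriverContext(String driverName) {}          // stand-in for the real DriverContext

    interface ExpressionEvaluator {
        double eval(double[] row);                      // the real eval(Page) consumes Blocks

        interface Factory {                             // shape of EvalOperator.ExpressionEvaluator.Factory
            ExpressionEvaluator get(DriverContext context);
        }
    }

    // Leaf factory: reads one "column", loosely analogous to a field-load evaluator.
    record Field(int index) implements ExpressionEvaluator.Factory {
        @Override
        public ExpressionEvaluator get(DriverContext context) {
            return row -> row[index];
        }
    }

    // Composite factory mirroring e.g. LessThanOrEqualDoublesEvaluator.Factory
    // above: child *factories* are captured, child *evaluators* are built per context.
    record LessThanOrEqual(ExpressionEvaluator.Factory lhs, ExpressionEvaluator.Factory rhs)
            implements ExpressionEvaluator.Factory {
        @Override
        public ExpressionEvaluator get(DriverContext context) {
            ExpressionEvaluator l = lhs.get(context);
            ExpressionEvaluator r = rhs.get(context);
            return row -> l.eval(row) <= r.eval(row) ? 1.0 : 0.0;
        }

        @Override
        public String toString() {
            return "LessThanOrEqual[lhs=" + lhs + ", rhs=" + rhs + "]";
        }
    }

    public static void main(String[] args) {
        ExpressionEvaluator.Factory plan = new LessThanOrEqual(new Field(0), new Field(1));
        // One factory tree, one evaluator tree per driver context:
        ExpressionEvaluator perDriverA = plan.get(new DriverContext("driver-a"));
        ExpressionEvaluator perDriverB = plan.get(new DriverContext("driver-b"));
        System.out.println(plan);                                  // planner-friendly description
        System.out.println(perDriverA.eval(new double[] {1, 2}));  // 1.0
        System.out.println(perDriverB.eval(new double[] {3, 2}));  // 0.0
    }
}
// ---------------------------------------------------------------------------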
index 08649061d107e..599eb46f34b2e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBooleanEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { BooleanBlock[] valuesBlocks = new BooleanBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (BooleanBlock) block; + valuesBlocks[i] = (BooleanBlock) valuesRefs[i].block(); } BooleanVector[] valuesVectors = new BooleanVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public LeastBooleanEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new LeastBooleanEvaluator(values, context); + } + + @Override + public String toString() { + return "LeastBooleanEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java index 862dc06a76d3f..f689674d2c255 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastBytesRefEvaluator.java @@ -39,11 +39,7 @@ public Block.Ref eval(Page page) { BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (BytesRefBlock) block; + valuesBlocks[i] = (BytesRefBlock) valuesRefs[i].block(); } BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -108,4 +104,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public LeastBytesRefEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> 
a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new LeastBytesRefEvaluator(values, context); + } + + @Override + public String toString() { + return "LeastBytesRefEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java index e82da59ab2d3e..5103337411881 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastDoubleEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { DoubleBlock[] valuesBlocks = new DoubleBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (DoubleBlock) block; + valuesBlocks[i] = (DoubleBlock) valuesRefs[i].block(); } DoubleVector[] valuesVectors = new DoubleVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public LeastDoubleEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new LeastDoubleEvaluator(values, context); + } + + @Override + public String toString() { + return "LeastDoubleEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java index 14db9f8d36866..8ed7ca24acb04 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastIntEvaluator.java @@ -37,11 +37,7 @@ public Block.Ref eval(Page page) { IntBlock[] valuesBlocks = new IntBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (IntBlock) block; + valuesBlocks[i] = (IntBlock) valuesRefs[i].block(); } IntVector[] valuesVectors = new IntVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -98,4 +94,23 @@ public String toString() { public void close() { 
Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public LeastIntEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new LeastIntEvaluator(values, context); + } + + @Override + public String toString() { + return "LeastIntEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java index 062ea464d4182..28d8a8adc7942 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/LeastLongEvaluator.java @@ -38,11 +38,7 @@ public Block.Ref eval(Page page) { LongBlock[] valuesBlocks = new LongBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (LongBlock) block; + valuesBlocks[i] = (LongBlock) valuesRefs[i].block(); } LongVector[] valuesVectors = new LongVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -99,4 +95,23 @@ public String toString() { public void close() { Releasables.closeExpectNoException(() -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(EvalOperator.ExpressionEvaluator.Factory[] values) { + this.values = values; + } + + @Override + public LeastLongEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new LeastLongEvaluator(values, context); + } + + @Override + public String toString() { + return "LeastLongEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java index 4649b9788d141..962ebc5aa5c3f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromDoubleEvaluator.java @@ -97,4 +97,25 @@ private static boolean evalValue(DoubleBlock container, int index) { double value = container.getDouble(index); return ToBoolean.fromDouble(value); } + + public static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToBooleanFromDoubleEvaluator get(DriverContext context) { + return new ToBooleanFromDoubleEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToBooleanFromDoubleEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java index 0ae15fb252dcf..620e4117b7c17 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromIntEvaluator.java @@ -97,4 +97,25 @@ private static boolean evalValue(IntBlock container, int index) { int value = container.getInt(index); return ToBoolean.fromInt(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToBooleanFromIntEvaluator get(DriverContext context) { + return new ToBooleanFromIntEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToBooleanFromIntEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java index 7afa5006c47c7..b267399fe1f29 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromLongEvaluator.java @@ -97,4 +97,25 @@ private static boolean evalValue(LongBlock container, int index) { long value = container.getLong(index); return ToBoolean.fromLong(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToBooleanFromLongEvaluator get(DriverContext context) { + return new ToBooleanFromLongEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToBooleanFromLongEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java index 389429da469f2..abd51282e4f3e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromStringEvaluator.java @@ -100,4 +100,25 @@ private static boolean evalValue(BytesRefBlock container, int index, BytesRef sc BytesRef value = container.getBytesRef(index, scratchPad); return ToBoolean.fromKeyword(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToBooleanFromStringEvaluator get(DriverContext context) { + return new ToBooleanFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToBooleanFromStringEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java index a1a35051afd6f..130ce5e5a7517 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBooleanFromUnsignedLongEvaluator.java @@ -97,4 +97,25 @@ private static boolean evalValue(LongBlock container, int index) { long value = container.getLong(index); return ToBoolean.fromUnsignedLong(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToBooleanFromUnsignedLongEvaluator get(DriverContext context) { + return new ToBooleanFromUnsignedLongEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToBooleanFromUnsignedLongEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java index 1971eca9a9013..8cc49cb6de969 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetimeFromStringEvaluator.java @@ -100,4 +100,25 @@ private static long evalValue(BytesRefBlock container, int index, BytesRef scrat BytesRef value = container.getBytesRef(index, scratchPad); return ToDatetime.fromKeyword(value); } 
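// ---------------------------------------------------------------------------
// Reviewer sketch, not part of the generated patch: the conversion evaluators'
// factories below differ from the comparison ones only in also capturing a
// Source. Source presumably identifies the originating expression in the query
// so conversion warnings can be attributed to it; the diff itself only shows it
// being threaded into the evaluator constructor. All types below are stand-ins,
// and the parse/warn behavior of ParseLong is a hypothetical illustration.
final class ConversionFactorySketch {
    record DriverContext(String driverName) {}           // stand-in
    record Source(int line, int column, String text) {}  // stand-in for ql's Source

    interface StringEvaluator {
        String eval(String[] row);

        interface Factory {
            StringEvaluator get(DriverContext context);
        }
    }

    interface LongEvaluator {
        Long eval(String[] row);                         // null models a per-position null

        interface Factory {
            LongEvaluator get(DriverContext context);
        }
    }

    record Field(int index) implements StringEvaluator.Factory {
        @Override
        public StringEvaluator get(DriverContext context) {
            return row -> row[index];
        }
    }

    // Mirrors the Factory(field, source) shape added to each To* evaluator: the
    // Source is captured once and shared by every per-context evaluator, while
    // the child factory is expanded fresh in each get().
    record ParseLong(StringEvaluator.Factory field, Source source) implements LongEvaluator.Factory {
        @Override
        public LongEvaluator get(DriverContext context) {
            StringEvaluator input = field.get(context);
            return row -> {
                try {
                    return Long.parseLong(input.eval(row));
                } catch (NumberFormatException e) {
                    // hypothetical handling: the real evaluators presumably record a
                    // warning against `source` and emit null for the position
                    System.err.println("cannot convert value for " + source.text()
                            + " at " + source.line() + ":" + source.column());
                    return null;
                }
            };
        }
    }

    public static void main(String[] args) {
        LongEvaluator e = new ParseLong(new Field(0), new Source(1, 8, "to_long(x)"))
                .get(new DriverContext("driver-a"));
        System.out.println(e.eval(new String[] {"42"}));    // 42
        System.out.println(e.eval(new String[] {"oops"}));  // null, warning on stderr
    }
}
// ---------------------------------------------------------------------------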
+ + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDatetimeFromStringEvaluator get(DriverContext context) { + return new ToDatetimeFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDatetimeFromStringEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java index ddf175b4f8cf7..770c179e90363 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegreesEvaluator.java @@ -96,4 +96,25 @@ private static double evalValue(DoubleBlock container, int index) { double value = container.getDouble(index); return ToDegrees.process(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDegreesEvaluator get(DriverContext context) { + return new ToDegreesEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDegreesEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java index d0e4ee9eabfd6..b2b75e0dc74e3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromBooleanEvaluator.java @@ -97,4 +97,25 @@ private static double evalValue(BooleanBlock container, int index) { boolean value = container.getBoolean(index); return ToDouble.fromBoolean(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDoubleFromBooleanEvaluator get(DriverContext context) { + return new ToDoubleFromBooleanEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDoubleFromBooleanEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java index 99f86c44fb7c7..35894afb2ed70 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromIntEvaluator.java @@ -97,4 +97,25 @@ private static double evalValue(IntBlock container, int index) { int value = container.getInt(index); return ToDouble.fromInt(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDoubleFromIntEvaluator get(DriverContext context) { + return new ToDoubleFromIntEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDoubleFromIntEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java index 9cd28577964a0..2e8fb850a3d89 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromLongEvaluator.java @@ -97,4 +97,25 @@ private static double evalValue(LongBlock container, int index) { long value = container.getLong(index); return ToDouble.fromLong(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDoubleFromLongEvaluator get(DriverContext context) { + return new ToDoubleFromLongEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDoubleFromLongEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java index ca3860edbb5dc..1109318c9246d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromStringEvaluator.java @@ -100,4 +100,25 @@ private static double evalValue(BytesRefBlock container, int index, BytesRef scr BytesRef value = container.getBytesRef(index, scratchPad); return ToDouble.fromKeyword(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDoubleFromStringEvaluator get(DriverContext context) { + return new ToDoubleFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDoubleFromStringEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java index 34aabdbd345d1..759568428441a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleFromUnsignedLongEvaluator.java @@ -97,4 +97,25 @@ private static double evalValue(LongBlock container, int index) { long value = container.getLong(index); return ToDouble.fromUnsignedLong(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToDoubleFromUnsignedLongEvaluator get(DriverContext context) { + return new ToDoubleFromUnsignedLongEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToDoubleFromUnsignedLongEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java index eeeff15cce7c0..bbbe89cd31879 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIPFromStringEvaluator.java @@ -99,4 +99,25 @@ private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef s BytesRef value = container.getBytesRef(index, scratchPad); return ToIP.fromKeyword(value); } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) { + this.field = field; + this.source = source; + } + + @Override + public ToIPFromStringEvaluator get(DriverContext context) { + return new ToIPFromStringEvaluator(field.get(context), source, context); + } + + @Override + public String toString() { + return "ToIPFromStringEvaluator[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
index 85ae5de0d5a4c..e3fbc2750e35f 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromBooleanEvaluator.java
@@ -97,4 +97,25 @@ private static int evalValue(BooleanBlock container, int index) {
     boolean value = container.getBoolean(index);
     return ToInteger.fromBoolean(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToIntegerFromBooleanEvaluator get(DriverContext context) {
+      return new ToIntegerFromBooleanEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToIntegerFromBooleanEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
index 6b4d5b1f2c18c..e39ef77725d89 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromDoubleEvaluator.java
@@ -97,4 +97,25 @@ private static int evalValue(DoubleBlock container, int index) {
     double value = container.getDouble(index);
     return ToInteger.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToIntegerFromDoubleEvaluator get(DriverContext context) {
+      return new ToIntegerFromDoubleEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToIntegerFromDoubleEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
index 31d7daf08e31e..db8e818cb2031 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromLongEvaluator.java
@@ -97,4 +97,25 @@ private static int evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToInteger.fromLong(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToIntegerFromLongEvaluator get(DriverContext context) {
+      return new ToIntegerFromLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToIntegerFromLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
index 7000d691df9fe..5dda74375ec0d 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromStringEvaluator.java
@@ -100,4 +100,25 @@ private static int evalValue(BytesRefBlock container, int index, BytesRef scratc
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToInteger.fromKeyword(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToIntegerFromStringEvaluator get(DriverContext context) {
+      return new ToIntegerFromStringEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToIntegerFromStringEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
index bcbdc37c2d491..64be1ad6a44dd 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIntegerFromUnsignedLongEvaluator.java
@@ -97,4 +97,25 @@ private static int evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToInteger.fromUnsignedLong(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToIntegerFromUnsignedLongEvaluator get(DriverContext context) {
+      return new ToIntegerFromUnsignedLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToIntegerFromUnsignedLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
index a3a3ae3ba9988..5337dbdb32d38 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromBooleanEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(BooleanBlock container, int index) {
     boolean value = container.getBoolean(index);
     return ToLong.fromBoolean(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToLongFromBooleanEvaluator get(DriverContext context) {
+      return new ToLongFromBooleanEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToLongFromBooleanEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
index 92a7092e1fdf6..853882774550b 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromDoubleEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(DoubleBlock container, int index) {
     double value = container.getDouble(index);
     return ToLong.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToLongFromDoubleEvaluator get(DriverContext context) {
+      return new ToLongFromDoubleEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToLongFromDoubleEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
index 8c78fab528baf..3717a2b2da2a8 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromIntEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(IntBlock container, int index) {
     int value = container.getInt(index);
     return ToLong.fromInt(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToLongFromIntEvaluator get(DriverContext context) {
+      return new ToLongFromIntEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToLongFromIntEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
index 4c8de77a31dd4..49c82c93ccf92 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromStringEvaluator.java
@@ -100,4 +100,25 @@ private static long evalValue(BytesRefBlock container, int index, BytesRef scrat
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToLong.fromKeyword(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToLongFromStringEvaluator get(DriverContext context) {
+      return new ToLongFromStringEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToLongFromStringEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
index 3e56089b571dc..7f098b16edc7a 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLongFromUnsignedLongEvaluator.java
@@ -96,4 +96,25 @@ private static long evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToLong.fromUnsignedLong(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToLongFromUnsignedLongEvaluator get(DriverContext context) {
+      return new ToLongFromUnsignedLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToLongFromUnsignedLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
index 820eeff44e37d..b470f96434f34 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadiansEvaluator.java
@@ -96,4 +96,25 @@ private static double evalValue(DoubleBlock container, int index) {
     double value = container.getDouble(index);
     return ToRadians.process(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToRadiansEvaluator get(DriverContext context) {
+      return new ToRadiansEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToRadiansEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
index 99ee841b67f9e..a11cccfe5d5be 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromBooleanEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(BooleanBlock container, int index) {
     boolean value = container.getBoolean(index);
     return ToString.fromBoolean(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromBooleanEvaluator get(DriverContext context) {
+      return new ToStringFromBooleanEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromBooleanEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
index c934a2207272c..94f7357bc5917 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDatetimeEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToString.fromDatetime(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromDatetimeEvaluator get(DriverContext context) {
+      return new ToStringFromDatetimeEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromDatetimeEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
index 63a92b239d744..d219f607d44f7 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromDoubleEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(DoubleBlock container, int index) {
     double value = container.getDouble(index);
     return ToString.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromDoubleEvaluator get(DriverContext context) {
+      return new ToStringFromDoubleEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromDoubleEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
index 411b55ce74548..709e06844327c 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIPEvaluator.java
@@ -99,4 +99,25 @@ private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef s
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToString.fromIP(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromIPEvaluator get(DriverContext context) {
+      return new ToStringFromIPEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromIPEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
index 177d16eb105b6..659c92203f575 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromIntEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(IntBlock container, int index) {
     int value = container.getInt(index);
     return ToString.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromIntEvaluator get(DriverContext context) {
+      return new ToStringFromIntEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromIntEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
index 8bb24f09bdc87..f54494cff5704 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromLongEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToString.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromLongEvaluator get(DriverContext context) {
+      return new ToStringFromLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
index 4212bd183bc43..76803d0684682 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromUnsignedLongEvaluator.java
@@ -98,4 +98,25 @@ private static BytesRef evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToString.fromUnsignedLong(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromUnsignedLongEvaluator get(DriverContext context) {
+      return new ToStringFromUnsignedLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromUnsignedLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
index c33de8bc1a6b0..4866f68dae6d4 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToStringFromVersionEvaluator.java
@@ -99,4 +99,25 @@ private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef s
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToString.fromVersion(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToStringFromVersionEvaluator get(DriverContext context) {
+      return new ToStringFromVersionEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToStringFromVersionEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
index 9f7b2edaf3b81..dff1819ac4f66 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromBooleanEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(BooleanBlock container, int index) {
     boolean value = container.getBoolean(index);
     return ToUnsignedLong.fromBoolean(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToUnsignedLongFromBooleanEvaluator get(DriverContext context) {
+      return new ToUnsignedLongFromBooleanEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToUnsignedLongFromBooleanEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
index c4ac4be9fad76..38bf437a5f720 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromDoubleEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(DoubleBlock container, int index) {
     double value = container.getDouble(index);
     return ToUnsignedLong.fromDouble(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToUnsignedLongFromDoubleEvaluator get(DriverContext context) {
+      return new ToUnsignedLongFromDoubleEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToUnsignedLongFromDoubleEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
index 4c4db207c8214..d395e37410b7a 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromIntEvaluator.java
@@ -97,4 +97,25 @@ private static long evalValue(IntBlock container, int index) {
     int value = container.getInt(index);
     return ToUnsignedLong.fromInt(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToUnsignedLongFromIntEvaluator get(DriverContext context) {
+      return new ToUnsignedLongFromIntEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToUnsignedLongFromIntEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
index a8f8261043d28..398142981cbef 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromLongEvaluator.java
@@ -96,4 +96,25 @@ private static long evalValue(LongBlock container, int index) {
     long value = container.getLong(index);
     return ToUnsignedLong.fromLong(value);
  }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToUnsignedLongFromLongEvaluator get(DriverContext context) {
+      return new ToUnsignedLongFromLongEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToUnsignedLongFromLongEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
index c60f4e1aea2d6..07e2ec42da7b8 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLongFromStringEvaluator.java
@@ -100,4 +100,25 @@ private static long evalValue(BytesRefBlock container, int index, BytesRef scrat
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToUnsignedLong.fromKeyword(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToUnsignedLongFromStringEvaluator get(DriverContext context) {
+      return new ToUnsignedLongFromStringEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToUnsignedLongFromStringEvaluator[field=" + field + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java
index e64e7fd575e8b..00f88ba76b3e7 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersionFromStringEvaluator.java
@@ -99,4 +99,25 @@ private static BytesRef evalValue(BytesRefBlock container, int index, BytesRef s
     BytesRef value = container.getBytesRef(index, scratchPad);
     return ToVersion.fromKeyword(value);
   }
+
+  public static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory field;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory field, Source source) {
+      this.field = field;
+      this.source = source;
+    }
+
+    @Override
+    public ToVersionFromStringEvaluator get(DriverContext context) {
+      return new ToVersionFromStringEvaluator(field.get(context), source, context);
+    }
+
+    @Override
+    public String toString() {
+      return "ToVersionFromStringEvaluator[field=" + field + "]";
+    }
+  }
 }
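Every conversion evaluator above gains the same generated inner class: a Factory that holds the child expression's factory plus the Source, and only constructs the concrete evaluator when get(DriverContext) is called, so a single physical plan can hand out one independent evaluator per driver. A minimal, self-contained sketch of that pattern follows; every name in it is an illustrative stand-in rather than the real org.elasticsearch.compute type (the production interfaces are EvalOperator.ExpressionEvaluator and its nested Factory):

    // Stand-in for the real DriverContext, which carries per-driver state
    // such as the BlockFactory used to allocate result blocks.
    final class DriverContext {
    }

    // Stand-in for EvalOperator.ExpressionEvaluator, reduced to one method.
    interface ExpressionEvaluator {
        int eval(int input);
    }

    // Stand-in for EvalOperator.ExpressionEvaluator.Factory: called once per
    // driver, and each call may build a fresh evaluator tree.
    interface ExpressionEvaluatorFactory {
        ExpressionEvaluator get(DriverContext context);
    }

    // Analogue of a generated Factory: it wraps the child expression's factory
    // and materializes evaluators only inside get(DriverContext), just as the
    // generated get() methods above call field.get(context).
    final class NegateEvaluatorFactory implements ExpressionEvaluatorFactory {
        private final ExpressionEvaluatorFactory field;

        NegateEvaluatorFactory(ExpressionEvaluatorFactory field) {
            this.field = field;
        }

        @Override
        public ExpressionEvaluator get(DriverContext context) {
            ExpressionEvaluator child = field.get(context); // one child per driver
            return input -> -child.eval(input);             // then wrap it
        }

        @Override
        public String toString() {
            return "NegateEvaluator[field=" + field + "]";
        }
    }

As in the generated classes, toString() describes the factory tree without building an evaluator, which is why the factories above print the field factory rather than a live instance.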
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java
index 22feef4b574db..c8c2d615d3b2d 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractConstantEvaluator.java
@@ -40,9 +40,6 @@ public DateExtractConstantEvaluator(EvalOperator.ExpressionEvaluator value,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valueRef = value.eval(page)) {
-      if (valueRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock valueBlock = (LongBlock) valueRef.block();
       LongVector valueVector = valueBlock.asVector();
       if (valueVector == null) {
@@ -83,4 +80,29 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(value);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory value;
+
+    private final ChronoField chronoField;
+
+    private final ZoneId zone;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory value, ChronoField chronoField,
+        ZoneId zone) {
+      this.value = value;
+      this.chronoField = chronoField;
+      this.zone = zone;
+    }
+
+    @Override
+    public DateExtractConstantEvaluator get(DriverContext context) {
+      return new DateExtractConstantEvaluator(value.get(context), chronoField, zone, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateExtractConstantEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java
index a9a2bd8a8b15f..12f755e0b7a8a 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtractEvaluator.java
@@ -48,14 +48,8 @@ public DateExtractEvaluator(Source source, EvalOperator.ExpressionEvaluator valu
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valueRef = value.eval(page)) {
-      if (valueRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock valueBlock = (LongBlock) valueRef.block();
       try (Block.Ref chronoFieldRef = chronoField.eval(page)) {
-        if (chronoFieldRef.block().areAllValuesNull()) {
-          return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-        }
         BytesRefBlock chronoFieldBlock = (BytesRefBlock) chronoFieldRef.block();
         LongVector valueVector = valueBlock.asVector();
         if (valueVector == null) {
@@ -118,4 +112,32 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(value, chronoField);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory value;
+
+    private final EvalOperator.ExpressionEvaluator.Factory chronoField;
+
+    private final ZoneId zone;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value,
+        EvalOperator.ExpressionEvaluator.Factory chronoField, ZoneId zone) {
+      this.source = source;
+      this.value = value;
+      this.chronoField = chronoField;
+      this.zone = zone;
+    }
+
+    @Override
+    public DateExtractEvaluator get(DriverContext context) {
+      return new DateExtractEvaluator(source, value.get(context), chronoField.get(context), zone, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateExtractEvaluator[" + "value=" + value + ", chronoField=" + chronoField + ", zone=" + zone + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java
index 873332f3de8fd..35a3d45be821d 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatConstantEvaluator.java
@@ -38,9 +38,6 @@ public DateFormatConstantEvaluator(EvalOperator.ExpressionEvaluator val, DateFor
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock valBlock = (LongBlock) valRef.block();
       LongVector valVector = valBlock.asVector();
       if (valVector == null) {
@@ -81,4 +78,25 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    private final DateFormatter formatter;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory val, DateFormatter formatter) {
+      this.val = val;
+      this.formatter = formatter;
+    }
+
+    @Override
+    public DateFormatConstantEvaluator get(DriverContext context) {
+      return new DateFormatConstantEvaluator(val.get(context), formatter, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateFormatConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java
index ba1308ccb7dd6..95547c978af01 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormatEvaluator.java
@@ -42,14 +42,8 @@ public DateFormatEvaluator(EvalOperator.ExpressionEvaluator val,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock valBlock = (LongBlock) valRef.block();
       try (Block.Ref formatterRef = formatter.eval(page)) {
-        if (formatterRef.block().areAllValuesNull()) {
-          return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-        }
         BytesRefBlock formatterBlock = (BytesRefBlock) formatterRef.block();
         LongVector valVector = valBlock.asVector();
         if (valVector == null) {
@@ -102,4 +96,29 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val, formatter);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    private final EvalOperator.ExpressionEvaluator.Factory formatter;
+
+    private final Locale locale;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory val,
+        EvalOperator.ExpressionEvaluator.Factory formatter, Locale locale) {
+      this.val = val;
+      this.formatter = formatter;
+      this.locale = locale;
+    }
+
+    @Override
+    public DateFormatEvaluator get(DriverContext context) {
+      return new DateFormatEvaluator(val.get(context), formatter.get(context), locale, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateFormatEvaluator[" + "val=" + val + ", formatter=" + formatter + ", locale=" + locale + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java
index 55de843011250..7fbd0cf387768 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseConstantEvaluator.java
@@ -44,9 +44,6 @@ public DateParseConstantEvaluator(Source source, EvalOperator.ExpressionEvaluato
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       BytesRefBlock valBlock = (BytesRefBlock) valRef.block();
       BytesRefVector valVector = valBlock.asVector();
       if (valVector == null) {
@@ -99,4 +96,29 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    private final DateFormatter formatter;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val,
+        DateFormatter formatter) {
+      this.source = source;
+      this.val = val;
+      this.formatter = formatter;
+    }
+
+    @Override
+    public DateParseConstantEvaluator get(DriverContext context) {
+      return new DateParseConstantEvaluator(source, val.get(context), formatter, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateParseConstantEvaluator[" + "val=" + val + ", formatter=" + formatter + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java
index 536ebfbf16a5d..2a64bc27d79ba 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParseEvaluator.java
@@ -47,14 +47,8 @@ public DateParseEvaluator(Source source, EvalOperator.ExpressionEvaluator val,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       BytesRefBlock valBlock = (BytesRefBlock) valRef.block();
       try (Block.Ref formatterRef = formatter.eval(page)) {
-        if (formatterRef.block().areAllValuesNull()) {
-          return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-        }
         BytesRefBlock formatterBlock = (BytesRefBlock) formatterRef.block();
         BytesRefVector valVector = valBlock.asVector();
         if (valVector == null) {
@@ -119,4 +113,32 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val, formatter);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    private final EvalOperator.ExpressionEvaluator.Factory formatter;
+
+    private final ZoneId zoneId;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val,
+        EvalOperator.ExpressionEvaluator.Factory formatter, ZoneId zoneId) {
+      this.source = source;
+      this.val = val;
+      this.formatter = formatter;
+      this.zoneId = zoneId;
+    }
+
+    @Override
+    public DateParseEvaluator get(DriverContext context) {
+      return new DateParseEvaluator(source, val.get(context), formatter.get(context), zoneId, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateParseEvaluator[" + "val=" + val + ", formatter=" + formatter + ", zoneId=" + zoneId + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java
index f828fdf94b311..732b957d91c42 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTruncEvaluator.java
@@ -36,9 +36,6 @@ public DateTruncEvaluator(EvalOperator.ExpressionEvaluator fieldVal, Rounding.Pr
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref fieldValRef = fieldVal.eval(page)) {
-      if (fieldValRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock fieldValBlock = (LongBlock) fieldValRef.block();
       LongVector fieldValVector = fieldValBlock.asVector();
       if (fieldValVector == null) {
@@ -79,4 +76,25 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(fieldVal);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory fieldVal;
+
+    private final Rounding.Prepared rounding;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal, Rounding.Prepared rounding) {
+      this.fieldVal = fieldVal;
+      this.rounding = rounding;
+    }
+
+    @Override
+    public DateTruncEvaluator get(DriverContext context) {
+      return new DateTruncEvaluator(fieldVal.get(context), rounding, context);
+    }
+
+    @Override
+    public String toString() {
+      return "DateTruncEvaluator[" + "fieldVal=" + fieldVal + ", rounding=" + rounding + "]";
+    }
+  }
 }
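The date evaluators above also all shed the same guard from eval(Page): the deleted lines returned a constant-null block whenever an incoming block reported areAllValuesNull(), skipping the per-position work for that page entirely. A condensed stand-in for the control flow being removed, with simplified types (Block here is an illustrative stand-in, not org.elasticsearch.compute.data.Block; only areAllValuesNull and the position count come from the removed lines):

    import java.util.function.UnaryOperator;

    interface Block {
        boolean areAllValuesNull();

        int getPositionCount();
    }

    final class NullGuard {
        // Old behaviour, per the removed lines: short-circuit an all-null
        // input and emit an all-null block of the same length instead of
        // running the conversion at all.
        static Block evalWithGuard(Block input, UnaryOperator<Block> convert) {
            if (input.areAllValuesNull()) {
                return constantNullBlock(input.getPositionCount());
            }
            return convert.apply(input);
        }

        // Stand-in for the real Block.constantNullBlock(positions, blockFactory).
        static Block constantNullBlock(int positions) {
            return new Block() {
                @Override
                public boolean areAllValuesNull() {
                    return true;
                }

                @Override
                public int getPositionCount() {
                    return positions;
                }
            };
        }
    }

After the change the evaluators fall straight through to the block and vector paths, which already handle nulls position by position.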
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
index ba6038fdf44fa..c68a04e5fad30 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/date/NowEvaluator.java
@@ -48,4 +48,22 @@ public String toString() {
   @Override
   public void close() {
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final long now;
+
+    public Factory(long now) {
+      this.now = now;
+    }
+
+    @Override
+    public NowEvaluator get(DriverContext context) {
+      return new NowEvaluator(now, context);
+    }
+
+    @Override
+    public String toString() {
+      return "NowEvaluator[" + "now=" + now + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
index 5a9a6efdbfc35..8b1b88b4b39f0 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatchEvaluator.java
@@ -40,20 +40,13 @@ public CIDRMatchEvaluator(EvalOperator.ExpressionEvaluator ip,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref ipRef = ip.eval(page)) {
-      if (ipRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       BytesRefBlock ipBlock = (BytesRefBlock) ipRef.block();
       Block.Ref[] cidrsRefs = new Block.Ref[cidrs.length];
       try (Releasable cidrsRelease = Releasables.wrap(cidrsRefs)) {
         BytesRefBlock[] cidrsBlocks = new BytesRefBlock[cidrs.length];
         for (int i = 0; i < cidrsBlocks.length; i++) {
           cidrsRefs[i] = cidrs[i].eval(page);
-          Block block = cidrsRefs[i].block();
-          if (block.areAllValuesNull()) {
-            return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-          }
-          cidrsBlocks[i] = (BytesRefBlock) block;
+          cidrsBlocks[i] = (BytesRefBlock) cidrsRefs[i].block();
         }
         BytesRefVector ipVector = ipBlock.asVector();
         if (ipVector == null) {
@@ -130,4 +123,27 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(ip, () -> Releasables.close(cidrs));
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory ip;
+
+    private final EvalOperator.ExpressionEvaluator.Factory[] cidrs;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory ip,
+        EvalOperator.ExpressionEvaluator.Factory[] cidrs) {
+      this.ip = ip;
+      this.cidrs = cidrs;
+    }
+
+    @Override
+    public CIDRMatchEvaluator get(DriverContext context) {
+      EvalOperator.ExpressionEvaluator[] cidrs = Arrays.stream(this.cidrs).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new);
+      return new CIDRMatchEvaluator(ip.get(context), cidrs, context);
+    }
+
+    @Override
+    public String toString() {
+      return "CIDRMatchEvaluator[" + "ip=" + ip + ", cidrs=" + Arrays.toString(cidrs) + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
index f4c5d6afc802d..0c475df2c1d1b 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsDoubleEvaluator.java
@@ -32,9 +32,6 @@ public AbsDoubleEvaluator(EvalOperator.ExpressionEvaluator fieldVal,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref fieldValRef = fieldVal.eval(page)) {
-      if (fieldValRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       DoubleBlock fieldValBlock = (DoubleBlock) fieldValRef.block();
       DoubleVector fieldValVector = fieldValBlock.asVector();
       if (fieldValVector == null) {
@@ -75,4 +72,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(fieldVal);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory fieldVal;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) {
+      this.fieldVal = fieldVal;
+    }
+
+    @Override
+    public AbsDoubleEvaluator get(DriverContext context) {
+      return new AbsDoubleEvaluator(fieldVal.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AbsDoubleEvaluator[" + "fieldVal=" + fieldVal + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
index 5b736c21bdaa6..789113f780677 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsIntEvaluator.java
@@ -31,9 +31,6 @@ public AbsIntEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref fieldValRef = fieldVal.eval(page)) {
-      if (fieldValRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       IntBlock fieldValBlock = (IntBlock) fieldValRef.block();
       IntVector fieldValVector = fieldValBlock.asVector();
       if (fieldValVector == null) {
@@ -74,4 +71,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(fieldVal);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory fieldVal;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) {
+      this.fieldVal = fieldVal;
+    }
+
+    @Override
+    public AbsIntEvaluator get(DriverContext context) {
+      return new AbsIntEvaluator(fieldVal.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AbsIntEvaluator[" + "fieldVal=" + fieldVal + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
index 25a26377e2799..6c8a5597dec0a 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbsLongEvaluator.java
@@ -31,9 +31,6 @@ public AbsLongEvaluator(EvalOperator.ExpressionEvaluator fieldVal, DriverContext
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref fieldValRef = fieldVal.eval(page)) {
-      if (fieldValRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock fieldValBlock = (LongBlock) fieldValRef.block();
       LongVector fieldValVector = fieldValBlock.asVector();
       if (fieldValVector == null) {
@@ -74,4 +71,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(fieldVal);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory fieldVal;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory fieldVal) {
+      this.fieldVal = fieldVal;
+    }
+
+    @Override
+    public AbsLongEvaluator get(DriverContext context) {
+      return new AbsLongEvaluator(fieldVal.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AbsLongEvaluator[" + "fieldVal=" + fieldVal + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
index 6f858f17d9245..56ffd31806ebb 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AcosEvaluator.java
@@ -38,9 +38,6 @@ public AcosEvaluator(Source source, EvalOperator.ExpressionEvaluator val,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       DoubleBlock valBlock = (DoubleBlock) valRef.block();
       DoubleVector valVector = valBlock.asVector();
       if (valVector == null) {
@@ -91,4 +88,25 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
+      this.source = source;
+      this.val = val;
+    }
+
+    @Override
+    public AcosEvaluator get(DriverContext context) {
+      return new AcosEvaluator(source, val.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AcosEvaluator[" + "val=" + val + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
index 449f06bf6db43..466bffac10fd4 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AsinEvaluator.java
@@ -38,9 +38,6 @@ public AsinEvaluator(Source source, EvalOperator.ExpressionEvaluator val,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       DoubleBlock valBlock = (DoubleBlock) valRef.block();
       DoubleVector valVector = valBlock.asVector();
       if (valVector == null) {
@@ -91,4 +88,25 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final Source source;
+
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) {
+      this.source = source;
+      this.val = val;
+    }
+
+    @Override
+    public AsinEvaluator get(DriverContext context) {
+      return new AsinEvaluator(source, val.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AsinEvaluator[" + "val=" + val + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
index dea34d613c807..2cceab1bbad2f 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2Evaluator.java
@@ -35,14 +35,8 @@ public Atan2Evaluator(EvalOperator.ExpressionEvaluator y, EvalOperator.Expressio
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref yRef = y.eval(page)) {
-      if (yRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       DoubleBlock yBlock = (DoubleBlock) yRef.block();
       try (Block.Ref xRef = x.eval(page)) {
-        if (xRef.block().areAllValuesNull()) {
-          return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-        }
         DoubleBlock xBlock = (DoubleBlock) xRef.block();
         DoubleVector yVector = yBlock.asVector();
         if (yVector == null) {
@@ -92,4 +86,26 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(y, x);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory y;
+
+    private final EvalOperator.ExpressionEvaluator.Factory x;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory y,
+        EvalOperator.ExpressionEvaluator.Factory x) {
+      this.y = y;
+      this.x = x;
+    }
+
+    @Override
+    public Atan2Evaluator get(DriverContext context) {
+      return new Atan2Evaluator(y.get(context), x.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "Atan2Evaluator[" + "y=" + y + ", x=" + x + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
index ae1202630f262..11a628ce473b8 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/AtanEvaluator.java
@@ -31,9 +31,6 @@ public AtanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref valRef = val.eval(page)) {
-      if (valRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       DoubleBlock valBlock = (DoubleBlock) valRef.block();
       DoubleVector valVector = valBlock.asVector();
       if (valVector == null) {
@@ -74,4 +71,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(val);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory val;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory val) {
+      this.val = val;
+    }
+
+    @Override
+    public AtanEvaluator get(DriverContext context) {
+      return new AtanEvaluator(val.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "AtanEvaluator[" + "val=" + val + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
index 50e82d58a9ab6..2df190f80fbdf 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToDoubleEvaluator.java
@@ -33,9 +33,6 @@ public CastIntToDoubleEvaluator(EvalOperator.ExpressionEvaluator v, DriverContex
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       IntBlock vBlock = (IntBlock) vRef.block();
       IntVector vVector = vBlock.asVector();
       if (vVector == null) {
@@ -76,4 +73,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(v);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory v;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory v) {
+      this.v = v;
+    }
+
+    @Override
+    public CastIntToDoubleEvaluator get(DriverContext context) {
+      return new CastIntToDoubleEvaluator(v.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "CastIntToDoubleEvaluator[" + "v=" + v + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
index 6498e3c456d41..b45b02ec5cc7e 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToLongEvaluator.java
@@ -33,9 +33,6 @@ public CastIntToLongEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       IntBlock vBlock = (IntBlock) vRef.block();
       IntVector vVector = vBlock.asVector();
       if (vVector == null) {
@@ -76,4 +73,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(v);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory v;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory v) {
+      this.v = v;
+    }
+
+    @Override
+    public CastIntToLongEvaluator get(DriverContext context) {
+      return new CastIntToLongEvaluator(v.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "CastIntToLongEvaluator[" + "v=" + v + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
index a55ed80c60b26..2e9705cae6dd5 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastIntToUnsignedLongEvaluator.java
@@ -34,9 +34,6 @@ public CastIntToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       IntBlock vBlock = (IntBlock) vRef.block();
       IntVector vVector = vBlock.asVector();
       if (vVector == null) {
@@ -77,4 +74,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(v);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory v;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory v) {
+      this.v = v;
+    }
+
+    @Override
+    public CastIntToUnsignedLongEvaluator get(DriverContext context) {
+      return new CastIntToUnsignedLongEvaluator(v.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "CastIntToUnsignedLongEvaluator[" + "v=" + v + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
index b5aad5ccbe1a3..f182dbd9da4ff 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToDoubleEvaluator.java
@@ -34,9 +34,6 @@ public CastLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock vBlock = (LongBlock) vRef.block();
       LongVector vVector = vBlock.asVector();
       if (vVector == null) {
@@ -77,4 +74,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(v);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory v;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory v) {
+      this.v = v;
+    }
+
+    @Override
+    public CastLongToDoubleEvaluator get(DriverContext context) {
+      return new CastLongToDoubleEvaluator(v.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "CastLongToDoubleEvaluator[" + "v=" + v + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
index 777912eb318e4..0fdeea21abd3b 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastLongToUnsignedLongEvaluator.java
@@ -32,9 +32,6 @@ public CastLongToUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator v,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock vBlock = (LongBlock) vRef.block();
       LongVector vVector = vBlock.asVector();
       if (vVector == null) {
@@ -75,4 +72,22 @@ public String toString() {
   public void close() {
     Releasables.closeExpectNoException(v);
   }
+
+  static class Factory implements EvalOperator.ExpressionEvaluator.Factory {
+    private final EvalOperator.ExpressionEvaluator.Factory v;
+
+    public Factory(EvalOperator.ExpressionEvaluator.Factory v) {
+      this.v = v;
+    }
+
+    @Override
+    public CastLongToUnsignedLongEvaluator get(DriverContext context) {
+      return new CastLongToUnsignedLongEvaluator(v.get(context), context);
+    }
+
+    @Override
+    public String toString() {
+      return "CastLongToUnsignedLongEvaluator[" + "v=" + v + "]";
+    }
+  }
 }
diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
index 446c78556297a..ee97cd124ed17 100644
--- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CastUnsignedLongToDoubleEvaluator.java
@@ -34,9 +34,6 @@ public CastUnsignedLongToDoubleEvaluator(EvalOperator.ExpressionEvaluator v,
   @Override
   public Block.Ref eval(Page page) {
     try (Block.Ref vRef = v.eval(page)) {
-      if (vRef.block().areAllValuesNull()) {
-        return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory()));
-      }
       LongBlock
vBlock = (LongBlock) vRef.block(); LongVector vVector = vBlock.asVector(); if (vVector == null) { @@ -77,4 +74,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(v); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory v; + + public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + this.v = v; + } + + @Override + public CastUnsignedLongToDoubleEvaluator get(DriverContext context) { + return new CastUnsignedLongToDoubleEvaluator(v.get(context), context); + } + + @Override + public String toString() { + return "CastUnsignedLongToDoubleEvaluator[" + "v=" + v + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java index e2dca44c7e367..cf0ef72b1fbda 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CeilDoubleEvaluator.java @@ -31,9 +31,6 @@ public CeilDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext d @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public CeilDoubleEvaluator get(DriverContext context) { + return new CeilDoubleEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "CeilDoubleEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java index 441e468f06967..f6201661fcaea 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CosEvaluator.java @@ -31,9 +31,6 @@ public CosEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverCo @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements 
EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public CosEvaluator get(DriverContext context) { + return new CosEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "CosEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java index 9f5ea0e7829f3..35115b4098dba 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/CoshEvaluator.java @@ -38,9 +38,6 @@ public CoshEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public CoshEvaluator get(DriverContext context) { + return new CoshEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "CoshEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java index d7de9b12aa201..db33a7247d045 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/FloorDoubleEvaluator.java @@ -31,9 +31,6 @@ public FloorDoubleEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public FloorDoubleEvaluator get(DriverContext context) { + 
return new FloorDoubleEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "FloorDoubleEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java index 2535e078691c9..4886ef5ab2a79 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFiniteEvaluator.java @@ -33,9 +33,6 @@ public IsFiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext dri @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -76,4 +73,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public IsFiniteEvaluator get(DriverContext context) { + return new IsFiniteEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "IsFiniteEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java index 1d12ffe2f50f9..f59d1fff1cbc3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfiniteEvaluator.java @@ -33,9 +33,6 @@ public IsInfiniteEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext d @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -76,4 +73,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public IsInfiniteEvaluator get(DriverContext context) { + return new IsInfiniteEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "IsInfiniteEvaluator[" + "val=" + val + "]"; + } + } } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java index af809390d0991..67d4b56a4155a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaNEvaluator.java @@ -33,9 +33,6 @@ public IsNaNEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -76,4 +73,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public IsNaNEvaluator get(DriverContext context) { + return new IsNaNEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "IsNaNEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java index 665d441de8e04..b65d16c496050 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10DoubleEvaluator.java @@ -38,9 +38,6 @@ public Log10DoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public Log10DoubleEvaluator get(DriverContext context) { + return new Log10DoubleEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "Log10DoubleEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java index 4ae2b4b4ec944..5153df6cdef2a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10IntEvaluator.java @@ -39,9 +39,6 @@ public Log10IntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock valBlock = (IntBlock) valRef.block(); IntVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +89,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public Log10IntEvaluator get(DriverContext context) { + return new Log10IntEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "Log10IntEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java index bac27b792f0ef..3b8f3e5319a3b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10LongEvaluator.java @@ -39,9 +39,6 @@ public Log10LongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +89,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public Log10LongEvaluator get(DriverContext context) { + return new Log10LongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "Log10LongEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java index 
f7c77e64313fd..baa1198f009ff 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10UnsignedLongEvaluator.java @@ -39,9 +39,6 @@ public Log10UnsignedLongEvaluator(Source source, EvalOperator.ExpressionEvaluato @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +89,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public Log10UnsignedLongEvaluator get(DriverContext context) { + return new Log10UnsignedLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "Log10UnsignedLongEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java index ed12852d25ca3..271b036f9dfdb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowDoubleEvaluator.java @@ -41,14 +41,8 @@ public PowDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator base, @Override public Block.Ref eval(Page page) { try (Block.Ref baseRef = base.eval(page)) { - if (baseRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); try (Block.Ref exponentRef = exponent.eval(page)) { - if (exponentRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); DoubleVector baseVector = baseBlock.asVector(); if (baseVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(base, exponent); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowDoubleEvaluator get(DriverContext context) { + return new PowDoubleEvaluator(source, base.get(context), exponent.get(context), context); + } + + 
@Override + public String toString() { + return "PowDoubleEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java index 28218b016707a..e6e36d9057286 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowIntEvaluator.java @@ -42,14 +42,8 @@ public PowIntEvaluator(Source source, EvalOperator.ExpressionEvaluator base, @Override public Block.Ref eval(Page page) { try (Block.Ref baseRef = base.eval(page)) { - if (baseRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); try (Block.Ref exponentRef = exponent.eval(page)) { - if (exponentRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock exponentBlock = (DoubleBlock) exponentRef.block(); DoubleVector baseVector = baseBlock.asVector(); if (baseVector == null) { @@ -109,4 +103,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(base, exponent); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowIntEvaluator get(DriverContext context) { + return new PowIntEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowIntEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java index 8f15fbb359ffb..f634f4305e623 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowLongEvaluator.java @@ -42,14 +42,8 @@ public PowLongEvaluator(Source source, EvalOperator.ExpressionEvaluator base, @Override public Block.Ref eval(Page page) { try (Block.Ref baseRef = base.eval(page)) { - if (baseRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock baseBlock = (DoubleBlock) baseRef.block(); try (Block.Ref exponentRef = exponent.eval(page)) { - if (exponentRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock exponentBlock = (DoubleBlock) 
exponentRef.block(); DoubleVector baseVector = baseBlock.asVector(); if (baseVector == null) { @@ -109,4 +103,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(base, exponent); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory base; + + private final EvalOperator.ExpressionEvaluator.Factory exponent; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory base, + EvalOperator.ExpressionEvaluator.Factory exponent) { + this.source = source; + this.base = base; + this.exponent = exponent; + } + + @Override + public PowLongEvaluator get(DriverContext context) { + return new PowLongEvaluator(source, base.get(context), exponent.get(context), context); + } + + @Override + public String toString() { + return "PowLongEvaluator[" + "base=" + base + ", exponent=" + exponent + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java index 92c5077cee1e9..e9d602ff73cf4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleEvaluator.java @@ -37,14 +37,8 @@ public RoundDoubleEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); try (Block.Ref decimalsRef = decimals.eval(page)) { - if (decimalsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val, decimals); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final EvalOperator.ExpressionEvaluator.Factory decimals; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val, + EvalOperator.ExpressionEvaluator.Factory decimals) { + this.val = val; + this.decimals = decimals; + } + + @Override + public RoundDoubleEvaluator get(DriverContext context) { + return new RoundDoubleEvaluator(val.get(context), decimals.get(context), context); + } + + @Override + public String toString() { + return "RoundDoubleEvaluator[" + "val=" + val + ", decimals=" + decimals + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java index 3d51037b55235..0d94d317ea748 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundDoubleNoDecimalsEvaluator.java @@ -32,9 +32,6 @@ public RoundDoubleNoDecimalsEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -75,4 +72,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public RoundDoubleNoDecimalsEvaluator get(DriverContext context) { + return new RoundDoubleNoDecimalsEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "RoundDoubleNoDecimalsEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java index d9723b429e7a2..737055262ac5c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundIntEvaluator.java @@ -37,14 +37,8 @@ public RoundIntEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock valBlock = (IntBlock) valRef.block(); try (Block.Ref decimalsRef = decimals.eval(page)) { - if (decimalsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); IntVector valVector = valBlock.asVector(); if (valVector == null) { @@ -94,4 +88,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val, decimals); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final EvalOperator.ExpressionEvaluator.Factory decimals; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val, + EvalOperator.ExpressionEvaluator.Factory decimals) { + this.val = val; + this.decimals = decimals; + } + + @Override + public RoundIntEvaluator get(DriverContext context) { + return new RoundIntEvaluator(val.get(context), decimals.get(context), context); + } + + @Override + public String toString() { + return "RoundIntEvaluator[" + "val=" + val + ", decimals=" + decimals + "]"; + } + } } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java index 43521218e7c80..79311138e3333 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundLongEvaluator.java @@ -35,14 +35,8 @@ public RoundLongEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); try (Block.Ref decimalsRef = decimals.eval(page)) { - if (decimalsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val, decimals); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final EvalOperator.ExpressionEvaluator.Factory decimals; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val, + EvalOperator.ExpressionEvaluator.Factory decimals) { + this.val = val; + this.decimals = decimals; + } + + @Override + public RoundLongEvaluator get(DriverContext context) { + return new RoundLongEvaluator(val.get(context), decimals.get(context), context); + } + + @Override + public String toString() { + return "RoundLongEvaluator[" + "val=" + val + ", decimals=" + decimals + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index d0d339719211b..d3487fb3215fb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -35,14 +35,8 @@ public RoundUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); try (Block.Ref decimalsRef = decimals.eval(page)) { - if (decimalsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock decimalsBlock = (LongBlock) decimalsRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val, decimals); } + + static class 
Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + private final EvalOperator.ExpressionEvaluator.Factory decimals; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val, + EvalOperator.ExpressionEvaluator.Factory decimals) { + this.val = val; + this.decimals = decimals; + } + + @Override + public RoundUnsignedLongEvaluator get(DriverContext context) { + return new RoundUnsignedLongEvaluator(val.get(context), decimals.get(context), context); + } + + @Override + public String toString() { + return "RoundUnsignedLongEvaluator[" + "val=" + val + ", decimals=" + decimals + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java index 57dc5e200469a..b01800662c1a0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinEvaluator.java @@ -31,9 +31,6 @@ public SinEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverCo @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public SinEvaluator get(DriverContext context) { + return new SinEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "SinEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java index 05082475132d4..751b32dbe839d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SinhEvaluator.java @@ -38,9 +38,6 @@ public SinhEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SinhEvaluator get(DriverContext context) { + return new SinhEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SinhEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java index 264a7e6c70dde..4f13a19c62084 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtDoubleEvaluator.java @@ -38,9 +38,6 @@ public SqrtDoubleEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SqrtDoubleEvaluator get(DriverContext context) { + return new SqrtDoubleEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SqrtDoubleEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java index 9ea795b7917ba..db6f173ae839a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtIntEvaluator.java @@ -39,9 +39,6 @@ public SqrtIntEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock valBlock = (IntBlock) valRef.block(); IntVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +89,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = 
val; + } + + @Override + public SqrtIntEvaluator get(DriverContext context) { + return new SqrtIntEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SqrtIntEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java index b64044ef1fb98..3ff2dce06dad7 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtLongEvaluator.java @@ -39,9 +39,6 @@ public SqrtLongEvaluator(Source source, EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -92,4 +89,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory val) { + this.source = source; + this.val = val; + } + + @Override + public SqrtLongEvaluator get(DriverContext context) { + return new SqrtLongEvaluator(source, val.get(context), context); + } + + @Override + public String toString() { + return "SqrtLongEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java index d0a64c44128bd..372b0f794dad8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/SqrtUnsignedLongEvaluator.java @@ -34,9 +34,6 @@ public SqrtUnsignedLongEvaluator(EvalOperator.ExpressionEvaluator val, @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock valBlock = (LongBlock) valRef.block(); LongVector valVector = valBlock.asVector(); if (valVector == null) { @@ -77,4 +74,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public SqrtUnsignedLongEvaluator get(DriverContext context) { + return new SqrtUnsignedLongEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return 
"SqrtUnsignedLongEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java index 51f0b4efa5fd1..57628ab0f1e47 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanEvaluator.java @@ -31,9 +31,6 @@ public TanEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverCo @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public TanEvaluator get(DriverContext context) { + return new TanEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "TanEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java index be09dbf20ba46..89e5406455ec5 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/TanhEvaluator.java @@ -31,9 +31,6 @@ public TanhEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock valBlock = (DoubleBlock) valRef.block(); DoubleVector valVector = valBlock.asVector(); if (valVector == null) { @@ -74,4 +71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public TanhEvaluator get(DriverContext context) { + return new TanhEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "TanhEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java index 
a72ed66bcab0a..58081a25c1e69 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgDoubleEvaluator.java @@ -85,4 +85,22 @@ public Block.Ref evalNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvAvgDoubleEvaluator get(DriverContext context) { + return new MvAvgDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvAvg[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java index c8c433766c157..eb807984feb7a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgIntEvaluator.java @@ -148,4 +148,22 @@ public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvAvgIntEvaluator get(DriverContext context) { + return new MvAvgIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvAvg[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java index d2907cfc3bdf1..a81ec98c8d3e4 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgLongEvaluator.java @@ -148,4 +148,22 @@ public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvAvgLongEvaluator get(DriverContext context) { + return new MvAvgLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvAvg[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java index cf8a6f00743e4..3700872c98e42 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgUnsignedLongEvaluator.java @@ -149,4 +149,22 @@ public Block.Ref evalSingleValuedNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvAvgUnsignedLongEvaluator get(DriverContext context) { + return new MvAvgUnsignedLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvAvg[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java index 2b1efbff7abf8..c3e7c600f259f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBooleanEvaluator.java @@ -135,4 +135,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMaxBooleanEvaluator get(DriverContext context) { + return new MvMaxBooleanEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMax[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java index 4f8a77708126c..0a125eaa9a579 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxBytesRefEvaluator.java @@ -144,4 +144,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMaxBytesRefEvaluator get(DriverContext context) { + return new MvMaxBytesRefEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMax[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java index 
dd176dfc1a64b..7912cb7f5ce5c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxDoubleEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMaxDoubleEvaluator get(DriverContext context) { + return new MvMaxDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMax[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java index f7492da9d463b..b42ec323fe7f8 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxIntEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMaxIntEvaluator get(DriverContext context) { + return new MvMaxIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMax[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java index b9fd4cb54d1a0..baaf5e49a3fa0 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxLongEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMaxLongEvaluator get(DriverContext context) { + return new MvMaxLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMax[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java index a63d52f6eab43..b8f122e99db0f 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianDoubleEvaluator.java @@ -85,4 +85,22 @@ public Block.Ref evalNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMedianDoubleEvaluator get(DriverContext context) { + return new MvMedianDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMedian[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java index 94431da85fdb0..08e351c6c0122 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianIntEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMedianIntEvaluator get(DriverContext context) { + return new MvMedianIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMedian[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java index 66926f1dfb038..76bdea1ce761e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianLongEvaluator.java @@ -135,4 +135,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMedianLongEvaluator get(DriverContext context) { + return new MvMedianLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMedian[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java index 
b427fb25c3d84..23191d6a64ca1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianUnsignedLongEvaluator.java @@ -135,4 +135,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMedianUnsignedLongEvaluator get(DriverContext context) { + return new MvMedianUnsignedLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMedian[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java index 8d66b27e2c8da..0935a2f28569c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBooleanEvaluator.java @@ -135,4 +135,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMinBooleanEvaluator get(DriverContext context) { + return new MvMinBooleanEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMin[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java index 6b64cde3be6f2..cdc54a1f73990 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinBytesRefEvaluator.java @@ -144,4 +144,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMinBytesRefEvaluator get(DriverContext context) { + return new MvMinBytesRefEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMin[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java index bc4a942d3b348..ec3129cfd11be 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinDoubleEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMinDoubleEvaluator get(DriverContext context) { + return new MvMinDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMin[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java index f4ef6729bf598..58579ae9b0f91 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinIntEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMinIntEvaluator get(DriverContext context) { + return new MvMinIntEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMin[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java index 8720cbd82108e..89ecd6f5fcafc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinLongEvaluator.java @@ -134,4 +134,22 @@ private Block.Ref evalAscendingNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvMinLongEvaluator get(DriverContext context) { + return new MvMinLongEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvMin[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java index eddc8b7d0a69e..53e07d01b37dc 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumDoubleEvaluator.java @@ -85,4 +85,22 @@ public Block.Ref evalNotNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(EvalOperator.ExpressionEvaluator.Factory field) { + this.field = field; + } + + @Override + public MvSumDoubleEvaluator get(DriverContext context) { + return new MvSumDoubleEvaluator(field.get(context), context); + } + + @Override + public String toString() { + return "MvSum[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java index d9dcdd7239648..97dabfe927507 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumIntEvaluator.java @@ -69,4 +69,25 @@ public Block.Ref evalNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field) { + this.source = source; + this.field = field; + } + + @Override + public MvSumIntEvaluator get(DriverContext context) { + return new MvSumIntEvaluator(source, field.get(context), context); + } + + @Override + public String toString() { + return "MvSum[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java index 15dc035d3314e..dbdeca3d5d3e3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumLongEvaluator.java @@ -69,4 +69,25 @@ public Block.Ref evalNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field) { + this.source = source; + this.field = field; + } + + @Override + public MvSumLongEvaluator get(DriverContext context) { + return new MvSumLongEvaluator(source, field.get(context), context); + } + + @Override + public String toString() { + return "MvSum[field=" + field + "]"; + } + } } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java index fb79d99209bcc..d92e61744a89d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSumUnsignedLongEvaluator.java @@ -69,4 +69,25 @@ public Block.Ref evalNullable(Block.Ref ref) { } } } + + public static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory field; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory field) { + this.source = source; + this.field = field; + } + + @Override + public MvSumUnsignedLongEvaluator get(DriverContext context) { + return new MvSumUnsignedLongEvaluator(source, field.get(context), context); + } + + @Override + public String toString() { + return "MvSum[field=" + field + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java index 0436bb6ada170..70b9f19715879 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatEvaluator.java @@ -7,6 +7,7 @@ import java.lang.Override; import java.lang.String; import java.util.Arrays; +import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -43,11 +44,7 @@ public Block.Ref eval(Page page) { BytesRefBlock[] valuesBlocks = new BytesRefBlock[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { valuesRefs[i] = values[i].eval(page); - Block block = valuesRefs[i].block(); - if (block.areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } - valuesBlocks[i] = (BytesRefBlock) block; + valuesBlocks[i] = (BytesRefBlock) valuesRefs[i].block(); } BytesRefVector[] valuesVectors = new BytesRefVector[values.length]; for (int i = 0; i < valuesBlocks.length; i++) { @@ -112,4 +109,27 @@ public String toString() { public void close() { Releasables.closeExpectNoException(scratch, () -> Releasables.close(values)); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Function<DriverContext, BreakingBytesRefBuilder> scratch; + + private final EvalOperator.ExpressionEvaluator.Factory[] values; + + public Factory(Function<DriverContext, BreakingBytesRefBuilder> scratch, + EvalOperator.ExpressionEvaluator.Factory[] values) { + this.scratch = scratch; + this.values = values; + } + + @Override + public ConcatEvaluator get(DriverContext context) { + EvalOperator.ExpressionEvaluator[] values = Arrays.stream(this.values).map(a -> a.get(context)).toArray(EvalOperator.ExpressionEvaluator[]::new); + return new ConcatEvaluator(scratch.apply(context), values, context); + } + + @Override + public String toString() { + return 
"ConcatEvaluator[" + "values=" + Arrays.toString(values) + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java index 361a4f791f925..a701cd81a5a4f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWithEvaluator.java @@ -38,14 +38,8 @@ public EndsWithEvaluator(EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref suffixRef = suffix.eval(page)) { - if (suffixRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock suffixBlock = (BytesRefBlock) suffixRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -100,4 +94,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, suffix); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory suffix; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory suffix) { + this.str = str; + this.suffix = suffix; + } + + @Override + public EndsWithEvaluator get(DriverContext context) { + return new EndsWithEvaluator(str.get(context), suffix.get(context), context); + } + + @Override + public String toString() { + return "EndsWithEvaluator[" + "str=" + str + ", suffix=" + suffix + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java index 7abfb07ee64d4..27b2a10d4dc54 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrimEvaluator.java @@ -32,9 +32,6 @@ public LTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); BytesRefVector valVector = valBlock.asVector(); if (valVector == null) { @@ -77,4 +74,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + 
@Override + public LTrimEvaluator get(DriverContext context) { + return new LTrimEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "LTrimEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java index 818b702e60b34..3514f812ecfe1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LeftEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.data.Block; @@ -46,14 +47,8 @@ public LeftEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref lengthRef = length.eval(page)) { - if (lengthRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lengthBlock = (IntBlock) lengthRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -105,4 +100,34 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, length); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Function<DriverContext, BytesRef> out; + + private final Function<DriverContext, UnicodeUtil.UTF8CodePoint> cp; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory length; + + public Factory(Function<DriverContext, BytesRef> out, + Function<DriverContext, UnicodeUtil.UTF8CodePoint> cp, + EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory length) { + this.out = out; + this.cp = cp; + this.str = str; + this.length = length; + } + + @Override + public LeftEvaluator get(DriverContext context) { + return new LeftEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + } + + @Override + public String toString() { + return "LeftEvaluator[" + "str=" + str + ", length=" + length + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java index 9345551875ad4..b91a2389c60fe 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/LengthEvaluator.java @@ -34,9 +34,6 @@ public LengthEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext drive @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); BytesRefVector valVector = valBlock.asVector(); if (valVector == null) { @@ -79,4 +76,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public LengthEvaluator get(DriverContext context) { + return new LengthEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "LengthEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java index b9504b4305431..0cac1ac9380b3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrimEvaluator.java @@ -32,9 +32,6 @@ public RTrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driver @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); BytesRefVector valVector = valBlock.asVector(); if (valVector == null) { @@ -77,4 +74,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public RTrimEvaluator get(DriverContext context) { + return new RTrimEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "RTrimEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java index c37abac4ff689..3baf32446ab4d 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceConstantEvaluator.java @@ -46,14 +46,8 @@ public ReplaceConstantEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref newStrRef = newStr.eval(page)) { - if (newStrRef.block().areAllValuesNull()) { - return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock newStrBlock = (BytesRefBlock) newStrRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -118,4 +112,32 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, newStr); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final Pattern regex; + + private final EvalOperator.ExpressionEvaluator.Factory newStr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, Pattern regex, + EvalOperator.ExpressionEvaluator.Factory newStr) { + this.source = source; + this.str = str; + this.regex = regex; + this.newStr = newStr; + } + + @Override + public ReplaceConstantEvaluator get(DriverContext context) { + return new ReplaceConstantEvaluator(source, str.get(context), regex, newStr.get(context), context); + } + + @Override + public String toString() { + return "ReplaceConstantEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java index f8e3ca32e8731..b2738d52b2383 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/ReplaceEvaluator.java @@ -46,19 +46,10 @@ public ReplaceEvaluator(Source source, EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref regexRef = regex.eval(page)) { - if (regexRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock regexBlock = (BytesRefBlock) regexRef.block(); try (Block.Ref newStrRef = newStr.eval(page)) { - if (newStrRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock newStrBlock = (BytesRefBlock) newStrRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -135,4 +126,33 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, regex, newStr); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory regex; + + private final EvalOperator.ExpressionEvaluator.Factory newStr; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory regex, + EvalOperator.ExpressionEvaluator.Factory newStr) { + this.source = source; + this.str = str; + this.regex = regex; + this.newStr = newStr; + } + + @Override + public ReplaceEvaluator 
get(DriverContext context) { + return new ReplaceEvaluator(source, str.get(context), regex.get(context), newStr.get(context), context); + } + + @Override + public String toString() { + return "ReplaceEvaluator[" + "str=" + str + ", regex=" + regex + ", newStr=" + newStr + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java index ad8bee9d41e1d..e8b70f1ad416c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/RightEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.compute.data.Block; @@ -46,14 +47,8 @@ public RightEvaluator(BytesRef out, UnicodeUtil.UTF8CodePoint cp, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref lengthRef = length.eval(page)) { - if (lengthRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lengthBlock = (IntBlock) lengthRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -105,4 +100,34 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, length); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Function<DriverContext, BytesRef> out; + + private final Function<DriverContext, UnicodeUtil.UTF8CodePoint> cp; + + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory length; + + public Factory(Function<DriverContext, BytesRef> out, + Function<DriverContext, UnicodeUtil.UTF8CodePoint> cp, + EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory length) { + this.out = out; + this.cp = cp; + this.str = str; + this.length = length; + } + + @Override + public RightEvaluator get(DriverContext context) { + return new RightEvaluator(out.apply(context), cp.apply(context), str.get(context), length.get(context), context); + } + + @Override + public String toString() { + return "RightEvaluator[" + "str=" + str + ", length=" + length + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java index 7b8e2a34bafdd..5a76c991b7d01 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitSingleByteEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import 
org.elasticsearch.compute.data.BytesRefBlock; @@ -39,9 +40,6 @@ public SplitSingleByteEvaluator(EvalOperator.ExpressionEvaluator str, byte delim @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -84,4 +82,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final byte delim; + + private final Function<DriverContext, BytesRef> scratch; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, byte delim, + Function<DriverContext, BytesRef> scratch) { + this.str = str; + this.delim = delim; + this.scratch = scratch; + } + + @Override + public SplitSingleByteEvaluator get(DriverContext context) { + return new SplitSingleByteEvaluator(str.get(context), delim, scratch.apply(context), context); + } + + @Override + public String toString() { + return "SplitSingleByteEvaluator[" + "str=" + str + ", delim=" + delim + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java index f83393c7e8293..a81d1485a7c57 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitVariableEvaluator.java @@ -6,6 +6,7 @@ import java.lang.Override; import java.lang.String; +import java.util.function.Function; import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; @@ -39,14 +40,8 @@ public SplitVariableEvaluator(EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref delimRef = delim.eval(page)) { - if (delimRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock delimBlock = (BytesRefBlock) delimRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -101,4 +96,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, delim); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory delim; + + private final Function<DriverContext, BytesRef> scratch; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory delim, Function<DriverContext, BytesRef> scratch) { + this.str = str; + this.delim = delim; + this.scratch = scratch; + } + + @Override + public SplitVariableEvaluator get(DriverContext context) { + return new 
SplitVariableEvaluator(str.get(context), delim.get(context), scratch.apply(context), context); + } + + @Override + public String toString() { + return "SplitVariableEvaluator[" + "str=" + str + ", delim=" + delim + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java index 25e9d7b2732d1..752cc4b8ad946 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWithEvaluator.java @@ -38,14 +38,8 @@ public StartsWithEvaluator(EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref prefixRef = prefix.eval(page)) { - if (prefixRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock prefixBlock = (BytesRefBlock) prefixRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -100,4 +94,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, prefix); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory prefix; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory prefix) { + this.str = str; + this.prefix = prefix; + } + + @Override + public StartsWithEvaluator get(DriverContext context) { + return new StartsWithEvaluator(str.get(context), prefix.get(context), context); + } + + @Override + public String toString() { + return "StartsWithEvaluator[" + "str=" + str + ", prefix=" + prefix + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java index 411d1fd864333..9079f0ea84f55 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringEvaluator.java @@ -42,19 +42,10 @@ public SubstringEvaluator(EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref startRef = start.eval(page)) { - if (startRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock startBlock = (IntBlock) 
startRef.block(); try (Block.Ref lengthRef = length.eval(page)) { - if (lengthRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lengthBlock = (IntBlock) lengthRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -117,4 +108,30 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, start, length); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory start; + + private final EvalOperator.ExpressionEvaluator.Factory length; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory start, + EvalOperator.ExpressionEvaluator.Factory length) { + this.str = str; + this.start = start; + this.length = length; + } + + @Override + public SubstringEvaluator get(DriverContext context) { + return new SubstringEvaluator(str.get(context), start.get(context), length.get(context), context); + } + + @Override + public String toString() { + return "SubstringEvaluator[" + "str=" + str + ", start=" + start + ", length=" + length + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java index d393f2b6c5409..115162175e720 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/SubstringNoLengthEvaluator.java @@ -38,14 +38,8 @@ public SubstringNoLengthEvaluator(EvalOperator.ExpressionEvaluator str, @Override public Block.Ref eval(Page page) { try (Block.Ref strRef = str.eval(page)) { - if (strRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock strBlock = (BytesRefBlock) strRef.block(); try (Block.Ref startRef = start.eval(page)) { - if (startRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock startBlock = (IntBlock) startRef.block(); BytesRefVector strVector = strBlock.asVector(); if (strVector == null) { @@ -97,4 +91,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(str, start); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory str; + + private final EvalOperator.ExpressionEvaluator.Factory start; + + public Factory(EvalOperator.ExpressionEvaluator.Factory str, + EvalOperator.ExpressionEvaluator.Factory start) { + this.str = str; + this.start = start; + } + + @Override + public SubstringNoLengthEvaluator get(DriverContext context) { + return new SubstringNoLengthEvaluator(str.get(context), start.get(context), context); + } + + @Override + public String toString() { + return "SubstringNoLengthEvaluator[" + "str=" + str + ", start=" + start + "]"; + } + } } diff --git 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java index 8ee20059ba000..939715c718ef9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/function/scalar/string/TrimEvaluator.java @@ -32,9 +32,6 @@ public TrimEvaluator(EvalOperator.ExpressionEvaluator val, DriverContext driverC @Override public Block.Ref eval(Page page) { try (Block.Ref valRef = val.eval(page)) { - if (valRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } BytesRefBlock valBlock = (BytesRefBlock) valRef.block(); BytesRefVector valVector = valBlock.asVector(); if (valVector == null) { @@ -77,4 +74,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(val); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory val; + + public Factory(EvalOperator.ExpressionEvaluator.Factory val) { + this.val = val; + } + + @Override + public TrimEvaluator get(DriverContext context) { + return new TrimEvaluator(val.get(context), context); + } + + @Override + public String toString() { + return "TrimEvaluator[" + "val=" + val + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java index ca6088fad8652..40e4513629b2f 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDatetimesEvaluator.java @@ -43,9 +43,6 @@ public AddDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator dat @Override public Block.Ref eval(Page page) { try (Block.Ref datetimeRef = datetime.eval(page)) { - if (datetimeRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock datetimeBlock = (LongBlock) datetimeRef.block(); LongVector datetimeVector = datetimeBlock.asVector(); if (datetimeVector == null) { @@ -96,4 +93,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(datetime); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory datetime; + + private final TemporalAmount temporalAmount; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory datetime, + TemporalAmount temporalAmount) { + this.source = source; + this.datetime = datetime; + this.temporalAmount = temporalAmount; + } + + @Override + public AddDatetimesEvaluator get(DriverContext context) { + return new AddDatetimesEvaluator(source, datetime.get(context), temporalAmount, context); + } + + @Override + public String toString() { + return "AddDatetimesEvaluator[" + "datetime=" + 
datetime + ", temporalAmount=" + temporalAmount + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java index 016c6c501dd88..7f86fde38ec9b 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddDoublesEvaluator.java @@ -35,14 +35,8 @@ public AddDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public AddDoublesEvaluator get(DriverContext context) { + return new AddDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "AddDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java index 00c7ffa4164c7..96c34aa79df9c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddIntsEvaluator.java @@ -41,14 +41,8 @@ public AddIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { 
Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public AddIntsEvaluator get(DriverContext context) { + return new AddIntsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "AddIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java index 09b90f9357341..9158d6f92373c 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddLongsEvaluator.java @@ -41,14 +41,8 @@ public AddLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public AddLongsEvaluator get(DriverContext context) { + return new AddLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "AddLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java index 5dfa2b51cb000..6e85e253856d2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddUnsignedLongsEvaluator.java @@ 
-41,14 +41,8 @@ public AddUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public AddUnsignedLongsEvaluator get(DriverContext context) { + return new AddUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "AddUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java index 50ba6515e5004..8bbce4faf25d1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivDoublesEvaluator.java @@ -35,14 +35,8 @@ public DivDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public DivDoublesEvaluator get(DriverContext context) { + return new DivDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() 
{ + return "DivDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java index b3c6b9ea2b5e6..87933d3ef26c2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivIntsEvaluator.java @@ -41,14 +41,8 @@ public DivIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public DivIntsEvaluator get(DriverContext context) { + return new DivIntsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "DivIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java index 6cd600c1089be..f23af932b61f3 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivLongsEvaluator.java @@ -41,14 +41,8 @@ public DivLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 
@@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public DivLongsEvaluator get(DriverContext context) { + return new DivLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "DivLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java index c565a7b7ee55b..09de6b0c37527 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DivUnsignedLongsEvaluator.java @@ -41,14 +41,8 @@ public DivUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public DivUnsignedLongsEvaluator get(DriverContext context) { + return new DivUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "DivUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java index 5b2588a761cfd..96888b6a6fcd1 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java +++ 
b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModDoublesEvaluator.java @@ -35,14 +35,8 @@ public ModDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public ModDoublesEvaluator get(DriverContext context) { + return new ModDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "ModDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java index 1e98bf4cd124f..047beb241ed70 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModIntsEvaluator.java @@ -41,14 +41,8 @@ public ModIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public ModIntsEvaluator get(DriverContext context) { + return new 
ModIntsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "ModIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java index 4a9f1c6829994..706ccee418465 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModLongsEvaluator.java @@ -41,14 +41,8 @@ public ModLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public ModLongsEvaluator get(DriverContext context) { + return new ModLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "ModLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java index dc86ebd8da292..ede1fbce65458 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/ModUnsignedLongsEvaluator.java @@ -41,14 +41,8 @@ public ModUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), 
driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public ModUnsignedLongsEvaluator get(DriverContext context) { + return new ModUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "ModUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java index 3c941a8189b92..df9105b9a5b16 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulDoublesEvaluator.java @@ -35,14 +35,8 @@ public MulDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public MulDoublesEvaluator get(DriverContext context) { + return new MulDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "MulDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java index 180c7d9301efe..f57fa9aa71612 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java 
+++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulIntsEvaluator.java @@ -41,14 +41,8 @@ public MulIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public MulIntsEvaluator get(DriverContext context) { + return new MulIntsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "MulIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java index ed59fb7600944..ce045a7be9d17 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulLongsEvaluator.java @@ -41,14 +41,8 @@ public MulLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + 
public MulLongsEvaluator get(DriverContext context) { + return new MulLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "MulLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java index 841da4f6cd251..cd02f11c4a4fb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/MulUnsignedLongsEvaluator.java @@ -41,14 +41,8 @@ public MulUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public MulUnsignedLongsEvaluator get(DriverContext context) { + return new MulUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "MulUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java index a15407b0102f8..dbe3e02afeb0a 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegDoublesEvaluator.java @@ -31,9 +31,6 @@ public NegDoublesEvaluator(EvalOperator.ExpressionEvaluator v, DriverContext dri @Override public Block.Ref eval(Page page) { try (Block.Ref vRef = v.eval(page)) { - if (vRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock vBlock = (DoubleBlock) vRef.block(); DoubleVector vVector = vBlock.asVector(); if (vVector == null) { @@ -74,4 
+71,22 @@ public String toString() { public void close() { Releasables.closeExpectNoException(v); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory v; + + public Factory(EvalOperator.ExpressionEvaluator.Factory v) { + this.v = v; + } + + @Override + public NegDoublesEvaluator get(DriverContext context) { + return new NegDoublesEvaluator(v.get(context), context); + } + + @Override + public String toString() { + return "NegDoublesEvaluator[" + "v=" + v + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java index c71ec463e84ed..908b0197d347e 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegIntsEvaluator.java @@ -38,9 +38,6 @@ public NegIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, @Override public Block.Ref eval(Page page) { try (Block.Ref vRef = v.eval(page)) { - if (vRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock vBlock = (IntBlock) vRef.block(); IntVector vVector = vBlock.asVector(); if (vVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(v); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory v; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; + this.v = v; + } + + @Override + public NegIntsEvaluator get(DriverContext context) { + return new NegIntsEvaluator(source, v.get(context), context); + } + + @Override + public String toString() { + return "NegIntsEvaluator[" + "v=" + v + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java index 70f465d715977..6a773e6eceba9 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/NegLongsEvaluator.java @@ -38,9 +38,6 @@ public NegLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator v, @Override public Block.Ref eval(Page page) { try (Block.Ref vRef = v.eval(page)) { - if (vRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock vBlock = (LongBlock) vRef.block(); LongVector vVector = vBlock.asVector(); if (vVector == null) { @@ -91,4 +88,25 @@ public String toString() { public void close() { Releasables.closeExpectNoException(v); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final 
EvalOperator.ExpressionEvaluator.Factory v; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory v) { + this.source = source; + this.v = v; + } + + @Override + public NegLongsEvaluator get(DriverContext context) { + return new NegLongsEvaluator(source, v.get(context), context); + } + + @Override + public String toString() { + return "NegLongsEvaluator[" + "v=" + v + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java index 25240f9b0ac7f..b72bed55d0afb 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDatetimesEvaluator.java @@ -43,9 +43,6 @@ public SubDatetimesEvaluator(Source source, EvalOperator.ExpressionEvaluator dat @Override public Block.Ref eval(Page page) { try (Block.Ref datetimeRef = datetime.eval(page)) { - if (datetimeRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock datetimeBlock = (LongBlock) datetimeRef.block(); LongVector datetimeVector = datetimeBlock.asVector(); if (datetimeVector == null) { @@ -96,4 +93,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(datetime); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory datetime; + + private final TemporalAmount temporalAmount; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory datetime, + TemporalAmount temporalAmount) { + this.source = source; + this.datetime = datetime; + this.temporalAmount = temporalAmount; + } + + @Override + public SubDatetimesEvaluator get(DriverContext context) { + return new SubDatetimesEvaluator(source, datetime.get(context), temporalAmount, context); + } + + @Override + public String toString() { + return "SubDatetimesEvaluator[" + "datetime=" + datetime + ", temporalAmount=" + temporalAmount + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java index 9e873eaef85b1..79b064b105c68 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubDoublesEvaluator.java @@ -35,14 +35,8 @@ public SubDoublesEvaluator(EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock lhsBlock = (DoubleBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return 
Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } DoubleBlock rhsBlock = (DoubleBlock) rhsRef.block(); DoubleVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -92,4 +86,26 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public SubDoublesEvaluator get(DriverContext context) { + return new SubDoublesEvaluator(lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "SubDoublesEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java index 90ac7b908648f..4cf1cf68abff2 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubIntsEvaluator.java @@ -41,14 +41,8 @@ public SubIntsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock lhsBlock = (IntBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } IntBlock rhsBlock = (IntBlock) rhsRef.block(); IntVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public SubIntsEvaluator get(DriverContext context) { + return new SubIntsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "SubIntsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java index 8cf7a07cc1761..65ec31066e9db 100644 --- 
a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubLongsEvaluator.java @@ -41,14 +41,8 @@ public SubLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator lhs, @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public SubLongsEvaluator get(DriverContext context) { + return new SubLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "SubLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java index aa0e1bf842971..9879dcadc1239 100644 --- a/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java +++ b/x-pack/plugin/esql/src/main/java/generated/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/SubUnsignedLongsEvaluator.java @@ -41,14 +41,8 @@ public SubUnsignedLongsEvaluator(Source source, EvalOperator.ExpressionEvaluator @Override public Block.Ref eval(Page page) { try (Block.Ref lhsRef = lhs.eval(page)) { - if (lhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock lhsBlock = (LongBlock) lhsRef.block(); try (Block.Ref rhsRef = rhs.eval(page)) { - if (rhsRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } LongBlock rhsBlock = (LongBlock) rhsRef.block(); LongVector lhsVector = lhsBlock.asVector(); if (lhsVector == null) { @@ -108,4 +102,29 @@ public String toString() { public void close() { Releasables.closeExpectNoException(lhs, rhs); } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory lhs; + + private final EvalOperator.ExpressionEvaluator.Factory rhs; + + public Factory(Source 
source, EvalOperator.ExpressionEvaluator.Factory lhs, + EvalOperator.ExpressionEvaluator.Factory rhs) { + this.source = source; + this.lhs = lhs; + this.rhs = rhs; + } + + @Override + public SubUnsignedLongsEvaluator get(DriverContext context) { + return new SubUnsignedLongsEvaluator(source, lhs.get(context), rhs.get(context), context); + } + + @Override + public String toString() { + return "SubUnsignedLongsEvaluator[" + "lhs=" + lhs + ", rhs=" + rhs + "]"; + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index facff81033cb8..ee641cd9209a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -26,6 +26,7 @@ import static org.elasticsearch.xpack.esql.formatter.TextFormat.CSV; import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER; +import static org.elasticsearch.xpack.ql.util.LoggingUtils.logOnFailure; /** * Listens for a single {@link EsqlQueryResponse}, builds a corresponding {@link RestResponse} and sends it. @@ -162,8 +163,9 @@ public ActionListener<EsqlQueryResponse> wrapWithLogging() { }, ex -> { // In case of failure, stop the time manually before sending out the response. long timeMillis = stopWatch.stop().getMillis(); - onFailure(ex); LOGGER.info("Failed execution of ESQL query.\nQuery string: [{}]\nExecution time: [{}]ms", esqlQuery, timeMillis); + logOnFailure(LOGGER, ex); + onFailure(ex); }); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index 4db49c4d76c89..557da9639a086 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Keep; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Rename; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -324,9 +325,29 @@ protected LogicalPlan doRule(LogicalPlan plan) { return resolveEnrich(p, childrenOutput); } + if (plan instanceof MvExpand p) { + return resolveMvExpand(p, childrenOutput); + } + return plan.transformExpressionsUp(UnresolvedAttribute.class, ua -> maybeResolveAttribute(ua, childrenOutput)); } + private LogicalPlan resolveMvExpand(MvExpand p, List<Attribute> childrenOutput) { + if (p.target() instanceof UnresolvedAttribute ua) { + Attribute resolved = maybeResolveAttribute(ua, childrenOutput); + if (resolved == ua) { + return p; + } + return new MvExpand( + p.source(), + p.child(), + resolved, + new ReferenceAttribute(resolved.source(), resolved.name(), resolved.dataType(), null, resolved.nullable(), null, false) + ); + } + return p; + } + private Attribute maybeResolveAttribute(UnresolvedAttribute ua, List<Attribute> childrenOutput) { if (ua.customMessage()) { return ua; @@ -501,11 +522,7 @@ private LogicalPlan resolveEnrich(Enrich enrich, List<Attribute> childrenOutput) } if (resolved.resolved() && resolved.dataType() != KEYWORD) {
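Two hand-written changes sit in the hunks above. In EsqlResponseListener, the failure path now logs the query string, execution time, and (via logOnFailure) the exception before onFailure(ex) sends the response, so a failed query is recorded even if sending the error response itself goes wrong. In Analyzer, MV_EXPAND targets now take part in attribute resolution: resolveMvExpand looks the unresolved target up in the child plan's output and rebuilds the MvExpand node with the resolved attribute plus a fresh ReferenceAttribute that stands for the expanded output column. As an illustration (a query invented for this note, not taken from the diff): in ROW a = [1, 2, 3] | MV_EXPAND a, the target a resolves against the ROW output, and the a that downstream operators see is the new ReferenceAttribute, carrying one value per expanded row.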
resolved = ua.withUnresolvedMessage( - "Unsupported type [" - + resolved.dataType() - + "] for enrich matching field [" - + ua.name() - + "]; only KEYWORD allowed" + "Unsupported type [" + resolved.dataType() + "] for enrich matching field [" + ua.name() + "]; only KEYWORD allowed" ); } return new Enrich(enrich.source(), enrich.child(), enrich.policyName(), resolved, enrich.policy(), enrich.enrichFields()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index af8732ad9c969..98c1397d97860 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -30,7 +30,7 @@ import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.ValueSources; +import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -250,13 +250,14 @@ private void doLookup( NamedExpression extractField = extractFields.get(i); final ElementType elementType = LocalExecutionPlanner.toElementType(extractField.dataType()); mergingTypes[i] = elementType; - var sources = ValueSources.sources( + var sources = BlockReaderFactories.factories( List.of(searchContext), extractField instanceof Alias a ? ((NamedExpression) a.child()).name() : extractField.name(), - EsqlDataTypes.isUnsupported(extractField.dataType()), - elementType + EsqlDataTypes.isUnsupported(extractField.dataType()) + ); + intermediateOperators.add( + new ValuesSourceReaderOperator(BlockFactory.getNonBreakingInstance(), sources, 0, extractField.name()) ); - intermediateOperators.add(new ValuesSourceReaderOperator(sources, 0, extractField.name())); } // drop docs block intermediateOperators.add(droppingBlockOperator(extractFields.size() + 2, 0)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java index 53a915046b45f..b9ef94d587556 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java @@ -172,8 +172,18 @@ public Block.Ref eval(Page page) { @Override public void close() {} } - int channel = layout.get(attr.id()).channel(); - return driverContext -> new Attribute(channel); + record AttributeFactory(int channel) implements ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext driverContext) { + return new Attribute(channel); + } + + @Override + public String toString() { + return "Attribute[channel=" + channel + "]"; + } + } + return new AttributeFactory(layout.get(attr.id()).channel()); } } @@ -195,7 +205,18 @@ public String toString() { @Override public void close() {} } - return context -> new LiteralsEvaluator(context, lit); + record LiteralsEvaluatorFactory(Literal lit) implements ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext driverContext) { + return new LiteralsEvaluator(driverContext, lit); + } + + @Override + 
public String toString() { + return "LiteralsEvaluator[lit=" + lit + "]"; + } + } + return new LiteralsEvaluatorFactory(lit); } private static Block block(Literal lit, BlockFactory blockFactory, int positions) { @@ -209,7 +230,9 @@ private static Block block(Literal lit, BlockFactory blockFactory, int positions return Block.constantNullBlock(positions, blockFactory); } var wrapper = BlockUtils.wrapperFor(blockFactory, ElementType.fromJava(multiValue.get(0).getClass()), positions); - wrapper.accept(multiValue); + for (int i = 0; i < positions; i++) { + wrapper.accept(multiValue); + } return wrapper.builder().build(); } return BlockUtils.constantBlock(blockFactory, value, positions); @@ -221,12 +244,22 @@ static class IsNulls extends ExpressionMapper { @Override public ExpressionEvaluator.Factory map(IsNull isNull, Layout layout) { var field = toEvaluator(isNull.field(), layout); - return driverContext -> new IsNullEvaluator(driverContext, field.get(driverContext)); + return new IsNullEvaluatorFactory(field); } - record IsNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) - implements - EvalOperator.ExpressionEvaluator { + record IsNullEvaluatorFactory(EvalOperator.ExpressionEvaluator.Factory field) implements ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new IsNullEvaluator(context, field.get(context)); + } + + @Override + public String toString() { + return "IsNullEvaluator[field=" + field + ']'; + } + } + + record IsNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) implements ExpressionEvaluator { @Override public Block.Ref eval(Page page) { try (Block.Ref fieldBlock = field.eval(page)) { @@ -256,7 +289,7 @@ public void close() { @Override public String toString() { - return "IsNullEvaluator[" + "field=" + field + ']'; + return "IsNullEvaluator[field=" + field + ']'; } } } @@ -265,8 +298,19 @@ static class IsNotNulls extends ExpressionMapper { @Override public ExpressionEvaluator.Factory map(IsNotNull isNotNull, Layout layout) { - var field = toEvaluator(isNotNull.field(), layout); - return driverContext -> new IsNotNullEvaluator(driverContext, field.get(driverContext)); + return new IsNotNullEvaluatorFactory(toEvaluator(isNotNull.field(), layout)); + } + + record IsNotNullEvaluatorFactory(EvalOperator.ExpressionEvaluator.Factory field) implements ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new IsNotNullEvaluator(context, field.get(context)); + } + + @Override + public String toString() { + return "IsNotNullEvaluator[field=" + field + ']'; + } } record IsNotNullEvaluator(DriverContext driverContext, EvalOperator.ExpressionEvaluator field) @@ -301,7 +345,7 @@ public void close() { @Override public String toString() { - return "IsNotNullEvaluator[" + "field=" + field + ']'; + return "IsNotNullEvaluator[field=" + field + ']'; } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java index cb10499ae6d0b..f609bb5491569 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/ComparisonMapper.java @@ -7,9 +7,6 @@ 
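A small behavioral fix is buried in the EvalMapper hunk above: when a constant literal is multivalued, block(...) used to call wrapper.accept(multiValue) a single time, so only the first position of the supposedly constant block was populated; it now appends the value list once per position. A sketch of the fixed behavior with illustrative values (the element type and the literal [1, 2] are invented for this example; the helpers are the ones visible in the hunk):

    // A constant multivalue literal materialized over a 3-position page:
    var wrapper = BlockUtils.wrapperFor(blockFactory, ElementType.INT, 3);
    for (int p = 0; p < 3; p++) {
        wrapper.accept(List.of(1, 2)); // every position receives the full list
    }
    Block block = wrapper.builder().build(); // positions 0..2 each hold {1, 2}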
package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison; -import org.elasticsearch.common.TriFunction; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.ExpressionMapper; @@ -18,75 +15,76 @@ import org.elasticsearch.xpack.esql.type.EsqlDataTypeRegistry; import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.ql.tree.Source; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; +import java.util.function.BiFunction; + import static org.elasticsearch.xpack.esql.evaluator.EvalMapper.toEvaluator; public abstract class ComparisonMapper extends ExpressionMapper { public static final ExpressionMapper EQUALS = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsLongsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsKeywordsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsBoolsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsDoublesEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsKeywordsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.EqualsBoolsEvaluator.Factory::new ) { }; public static final ExpressionMapper NOT_EQUALS = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsLongsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsKeywordsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsBoolsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsDoublesEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsKeywordsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEqualsBoolsEvaluator.Factory::new ) { }; public static final ExpressionMapper GREATER_THAN = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanLongsEvaluator::new, - 
org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanKeywordsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanDoublesEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanKeywordsEvaluator.Factory::new ) { }; public static final ExpressionMapper GREATER_THAN_OR_EQUAL = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualLongsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualKeywordsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualDoublesEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThanOrEqualKeywordsEvaluator.Factory::new ) { }; public static final ExpressionMapper LESS_THAN = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanLongsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanKeywordsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanDoublesEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanKeywordsEvaluator.Factory::new ) { }; public static final ExpressionMapper LESS_THAN_OR_EQUAL = new ComparisonMapper( - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualIntsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualLongsEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualDoublesEvaluator::new, - org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualKeywordsEvaluator::new + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualIntsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualLongsEvaluator.Factory::new, + org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualDoublesEvaluator.Factory::new, + 
org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.LessThanOrEqualKeywordsEvaluator.Factory::new ) { }; - private final TriFunction ints; - private final TriFunction longs; - private final TriFunction doubles; - private final TriFunction keywords; - private final TriFunction bools; + private final BiFunction ints; + private final BiFunction longs; + private final BiFunction doubles; + private final BiFunction keywords; + private final BiFunction bools; private ComparisonMapper( - TriFunction ints, - TriFunction longs, - TriFunction doubles, - TriFunction keywords, - TriFunction bools + BiFunction ints, + BiFunction longs, + BiFunction doubles, + BiFunction keywords, + BiFunction bools ) { this.ints = ints; this.longs = longs; @@ -96,16 +94,16 @@ private ComparisonMapper( } ComparisonMapper( - TriFunction ints, - TriFunction longs, - TriFunction doubles, - TriFunction keywords + BiFunction ints, + BiFunction longs, + BiFunction doubles, + BiFunction keywords ) { this.ints = ints; this.longs = longs; this.doubles = doubles; this.keywords = keywords; - this.bools = (lhs, rhs, dvrCtx) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; + this.bools = (lhs, rhs) -> { throw EsqlIllegalArgumentException.illegalDataType(DataTypes.BOOLEAN); }; } @Override @@ -130,13 +128,13 @@ public final ExpressionEvaluator.Factory map(BinaryComparison bc, Layout layout) var leftEval = toEvaluator(bc.left(), layout); var rightEval = toEvaluator(bc.right(), layout); if (leftType == DataTypes.KEYWORD || leftType == DataTypes.TEXT || leftType == DataTypes.IP || leftType == DataTypes.VERSION) { - return dvrCtx -> keywords.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); + return keywords.apply(leftEval, rightEval); } if (leftType == DataTypes.BOOLEAN) { - return dvrCtx -> bools.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); + return bools.apply(leftEval, rightEval); } if (leftType == DataTypes.DATETIME) { - return dvrCtx -> longs.apply(leftEval.get(dvrCtx), rightEval.get(dvrCtx), dvrCtx); + return longs.apply(leftEval, rightEval); } throw new EsqlIllegalArgumentException("resolved type for [" + bc + "] but didn't implement mapping"); } @@ -145,25 +143,10 @@ public static ExpressionEvaluator.Factory castToEvaluator( BinaryOperator op, Layout layout, DataType required, - TriFunction buildEvaluator - ) { - var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); - var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return dvrCtx -> buildEvaluator.apply(lhs.get(dvrCtx), rhs.get(dvrCtx), dvrCtx); - } - - public static ExpressionEvaluator.Factory castToEvaluatorWithSource( - BinaryOperator op, - Layout layout, - DataType required, - TriFunction< - Source, - EvalOperator.ExpressionEvaluator, - EvalOperator.ExpressionEvaluator, - EvalOperator.ExpressionEvaluator> buildEvaluator + BiFunction factory ) { var lhs = Cast.cast(op.left().dataType(), required, toEvaluator(op.left(), layout)); var rhs = Cast.cast(op.right().dataType(), required, toEvaluator(op.right(), layout)); - return dvrCtx -> buildEvaluator.apply(op.source(), lhs.get(dvrCtx), rhs.get(dvrCtx)); + return factory.apply(lhs, rhs); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java index eaf148c27c3ee..e90510461551f 100644 --- 
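// [Editor's sketch, not part of the diff.] Because evaluators are now built by
// factories, the per-type constructor references above become two-argument
// functions of factories, so TriFunction (which also took a DriverContext)
// shrinks to java.util.function.BiFunction. A minimal model of that dispatch,
// with simplified stand-in types (the real code keys off DataTypes and the
// generated *Evaluator.Factory classes):
import java.util.function.BiFunction;

interface EvalFactory {}
record IntsCompareFactory(EvalFactory lhs, EvalFactory rhs) implements EvalFactory {}
record KeywordsCompareFactory(EvalFactory lhs, EvalFactory rhs) implements EvalFactory {}

class CompareMapper {
    private final BiFunction<EvalFactory, EvalFactory, EvalFactory> ints;
    private final BiFunction<EvalFactory, EvalFactory, EvalFactory> keywords;

    CompareMapper(BiFunction<EvalFactory, EvalFactory, EvalFactory> ints,
                  BiFunction<EvalFactory, EvalFactory, EvalFactory> keywords) {
        this.ints = ints;
        this.keywords = keywords;
    }

    EvalFactory map(String type, EvalFactory lhs, EvalFactory rhs) {
        // No DriverContext in sight: composition happens entirely at plan time.
        return switch (type) {
            case "int" -> ints.apply(lhs, rhs);
            case "keyword" -> keywords.apply(lhs, rhs);
            default -> throw new IllegalArgumentException(type);
        };
    }
}

// Usage sketch: new CompareMapper(IntsCompareFactory::new, KeywordsCompareFactory::new)
// would then be called as mapper.map("int", leftFactory, rightFactory).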
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/execution/PlanExecutor.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.analysis.Verifier; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.Mapper; @@ -30,7 +31,6 @@ public class PlanExecutor { private final IndexResolver indexResolver; private final PreAnalyzer preAnalyzer; private final FunctionRegistry functionRegistry; - private final LogicalPlanOptimizer logicalPlanOptimizer; private final Mapper mapper; private final Metrics metrics; private final Verifier verifier; @@ -39,7 +39,6 @@ public PlanExecutor(IndexResolver indexResolver) { this.indexResolver = indexResolver; this.preAnalyzer = new PreAnalyzer(); this.functionRegistry = new EsqlFunctionRegistry(); - this.logicalPlanOptimizer = new LogicalPlanOptimizer(); this.mapper = new Mapper(functionRegistry); this.metrics = new Metrics(); this.verifier = new Verifier(metrics); @@ -59,7 +58,7 @@ public void esql( enrichPolicyResolver, preAnalyzer, functionRegistry, - logicalPlanOptimizer, + new LogicalPlanOptimizer(new LogicalOptimizerContext(cfg)), mapper, verifier ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java index 2e20af1889773..85330c80750e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/CountDistinct.java @@ -87,7 +87,7 @@ public AggregatorFunctionSupplier supplier(BigArrays bigArrays, List in if (type == DataTypes.DOUBLE) { return new CountDistinctDoubleAggregatorFunctionSupplier(bigArrays, inputChannels, precision); } - if (type == DataTypes.KEYWORD || type == DataTypes.IP) { + if (type == DataTypes.KEYWORD || type == DataTypes.IP || type == DataTypes.TEXT) { return new CountDistinctBytesRefAggregatorFunctionSupplier(bigArrays, inputChannels, precision); } throw EsqlIllegalArgumentException.illegalDataType(type); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java index 744f9f3815e4e..2117828be6533 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Case.java @@ -156,17 +156,33 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - - List conditionsEval = conditions.stream() + ElementType resultType = LocalExecutionPlanner.toElementType(dataType()); + List conditionsFactories = conditions.stream() .map(c -> new ConditionEvaluatorSupplier(toEvaluator.apply(c.condition), toEvaluator.apply(c.value))) .toList(); - var elseValueEval = 
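// [Editor's sketch, not part of the diff.] The PlanExecutor hunk above stops
// caching a single shared LogicalPlanOptimizer and instead builds one per
// request, passing the request configuration in via LogicalOptimizerContext.
// Simplified stand-in shape of that change:
record Config(String timeZone) {}
record Optimizer(Config config) {
    Object optimize(Object plan) { return plan; }   // rules elided
}

class Executor {
    // Before: a `private final Optimizer optimizer` field, shared state that
    // could not depend on per-request settings.
    void execute(Object plan, Config requestConfig) {
        Optimizer optimizer = new Optimizer(requestConfig); // fresh per request
        optimizer.optimize(plan);
    }
}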
toEvaluator.apply(elseValue); - return dvrCtx -> new CaseEvaluator( - dvrCtx, - LocalExecutionPlanner.toElementType(dataType()), - conditionsEval.stream().map(x -> x.apply(dvrCtx)).toList(), - elseValueEval.get(dvrCtx) - ); + ExpressionEvaluator.Factory elseValueFactory = toEvaluator.apply(elseValue); + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CaseEvaluator( + context, + resultType, + conditionsFactories.stream().map(x -> x.apply(context)).toList(), + elseValueFactory.get(context) + ); + } + + @Override + public String toString() { + return "CaseEvaluator[resultType=" + + resultType + + ", conditions=" + + conditionsFactories + + ", elseVal=" + + elseValueFactory + + ']'; + } + }; } record ConditionEvaluatorSupplier(ExpressionEvaluator.Factory condition, ExpressionEvaluator.Factory value) @@ -176,6 +192,11 @@ record ConditionEvaluatorSupplier(ExpressionEvaluator.Factory condition, Express public ConditionEvaluator apply(DriverContext driverContext) { return new ConditionEvaluator(condition.get(driverContext), value.get(driverContext)); } + + @Override + public String toString() { + return "ConditionEvaluator[" + "condition=" + condition + ", value=" + value + ']'; + } } record ConditionEvaluator(EvalOperator.ExpressionEvaluator condition, EvalOperator.ExpressionEvaluator value) implements Releasable { @@ -212,9 +233,6 @@ public Block.Ref eval(Page page) { try (Releasable ignored = limited::releaseBlocks) { for (ConditionEvaluator condition : conditions) { try (Block.Ref conditionRef = condition.condition.eval(limited)) { - if (conditionRef.block().areAllValuesNull()) { - continue; - } BooleanBlock b = (BooleanBlock) conditionRef.block(); if (b.isNull(0)) { continue; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java index ae2a28f5d0907..948e44f946920 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Greatest.java @@ -9,16 +9,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMaxBooleanEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMaxBytesRefEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMaxDoubleEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMaxIntEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMaxLongEvaluator; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMax; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -111,42 +106,20 @@ public Object fold() { @Override public 
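// [Editor's sketch, not part of the diff.] Case cannot be a simple record
// factory because it holds a list of (condition, value) factory pairs, so the
// diff above uses an anonymous ExpressionEvaluator.Factory and overrides its
// toString() by hand. A stand-alone model with simplified types:
import java.util.List;

interface Ctx {}
interface Eval {}
interface EvalFactory { Eval get(Ctx ctx); }

record Condition(EvalFactory condition, EvalFactory value) {}

class CaseMapper {
    static EvalFactory toFactory(List<Condition> conditions, EvalFactory elseValue) {
        return new EvalFactory() {
            @Override
            public Eval get(Ctx ctx) {
                // Materialize every branch against the same driver context.
                conditions.forEach(c -> { c.condition().get(ctx); c.value().get(ctx); });
                elseValue.get(ctx);
                return new Eval() {};   // the real code returns a CaseEvaluator
            }

            @Override
            public String toString() {
                // Anonymous classes get a useless default toString(); the
                // override keeps plan explanations readable.
                return "CaseEvaluator[conditions=" + conditions + ", elseVal=" + elseValue + ']';
            }
        };
    }
}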
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var suppliers = children().stream().map(toEvaluator).toList(); + ExpressionEvaluator.Factory[] factories = children().stream() + .map(e -> toEvaluator.apply(new MvMax(e.source(), e))) + .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return dvrCtx -> new GreatestBooleanEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMaxBooleanEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new GreatestBooleanEvaluator.Factory(factories); } if (dataType == DataTypes.DOUBLE) { - return dvrCtx -> new GreatestDoubleEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMaxDoubleEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new GreatestDoubleEvaluator.Factory(factories); } if (dataType == DataTypes.INTEGER) { - return dvrCtx -> new GreatestIntEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMaxIntEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new GreatestIntEvaluator.Factory(factories); } if (dataType == DataTypes.LONG) { - return dvrCtx -> new GreatestLongEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMaxLongEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new GreatestLongEvaluator.Factory(factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -154,13 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new GreatestBytesRefEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMaxBytesRefEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new GreatestBytesRefEvaluator.Factory(factories); } throw EsqlIllegalArgumentException.illegalDataType(dataType); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java index 1219ae83b318c..f3b15a9f1f7eb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/Least.java @@ -9,16 +9,11 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Param; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinBooleanEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinBytesRefEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinDoubleEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinIntEvaluator; -import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMinLongEvaluator; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvMin; import 
org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.TypeResolutions; @@ -111,42 +106,20 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var suppliers = children().stream().map(toEvaluator).toList(); + ExpressionEvaluator.Factory[] factories = children().stream() + .map(e -> toEvaluator.apply(new MvMin(e.source(), e))) + .toArray(ExpressionEvaluator.Factory[]::new); if (dataType == DataTypes.BOOLEAN) { - return dvrCtx -> new LeastBooleanEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMinBooleanEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new LeastBooleanEvaluator.Factory(factories); } if (dataType == DataTypes.DOUBLE) { - return dvrCtx -> new LeastDoubleEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMinDoubleEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new LeastDoubleEvaluator.Factory(factories); } if (dataType == DataTypes.INTEGER) { - return dvrCtx -> new LeastIntEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMinIntEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new LeastIntEvaluator.Factory(factories); } if (dataType == DataTypes.LONG) { - return dvrCtx -> new LeastLongEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMinLongEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new LeastLongEvaluator.Factory(factories); } if (dataType == DataTypes.KEYWORD || dataType == DataTypes.TEXT @@ -154,13 +127,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new LeastBytesRefEvaluator( - suppliers.stream() - .map(es -> es.get(dvrCtx)) - .map(ev -> new MvMinBytesRefEvaluator(ev, dvrCtx)) - .toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + return new LeastBytesRefEvaluator.Factory(factories); } throw EsqlIllegalArgumentException.illegalDataType(dataType); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java index 291506ce5afb3..c717fafa877a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/AbstractConvertFunction.java @@ -9,7 +9,6 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.Vector; @@ -45,11 +44,11 @@ protected AbstractConvertFunction(Source source, Expression field) { */ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { DataType sourceType = field().dataType(); - var evaluator = evaluators().get(sourceType); - if (evaluator == null) { + var factory = factories().get(sourceType); + if (factory == null) { throw EsqlIllegalArgumentException.illegalDataType(sourceType); } - return dvrCtx -> evaluator.apply(fieldEval.get(dvrCtx), 
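// [Editor's sketch, not part of the diff.] Greatest and Least used to wrap
// each child's evaluator in an MvMax*/MvMin* evaluator by hand, once per
// driver. The hunks above instead rewrite each child expression as
// MvMax(child) (or MvMin) and let the ordinary toEvaluator machinery produce
// the factory. Simplified model of the idea:
import java.util.List;
import java.util.function.Function;

interface Expr {}
record MvMaxExpr(Expr child) implements Expr {}     // stand-in for MvMax
interface EvalFactory {}
record GreatestFactory(List<EvalFactory> children) implements EvalFactory {}

class GreatestMapper {
    static EvalFactory toFactory(List<Expr> children, Function<Expr, EvalFactory> toEvaluator) {
        // Wrap at the expression level; no per-driver wiring needed here.
        List<EvalFactory> factories = children.stream()
            .map(c -> toEvaluator.apply(new MvMaxExpr(c)))
            .toList();
        return new GreatestFactory(factories);
    }
}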
source(), dvrCtx); + return factory.build(fieldEval, source()); } @Override @@ -59,14 +58,19 @@ protected final TypeResolution resolveType() { } return isType( field(), - evaluators()::containsKey, + factories()::containsKey, sourceText(), null, - evaluators().keySet().stream().map(dt -> dt.name().toLowerCase(Locale.ROOT)).sorted().toArray(String[]::new) + factories().keySet().stream().map(dt -> dt.name().toLowerCase(Locale.ROOT)).sorted().toArray(String[]::new) ); } - protected abstract Map> evaluators(); + @FunctionalInterface + interface BuildFactory { + ExpressionEvaluator.Factory build(ExpressionEvaluator.Factory field, Source source); + } + + protected abstract Map factories(); @Override public final Object fold() { @@ -106,9 +110,6 @@ protected AbstractEvaluator(DriverContext driverContext, EvalOperator.Expression public Block.Ref eval(Page page) { try (Block.Ref ref = fieldEvaluator.eval(page)) { - if (ref.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), driverContext.blockFactory())); - } Vector vector = ref.block().asVector(); return Block.Ref.floating(vector == null ? evalBlock(ref.block()) : evalVector(vector)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java index 701b3fa67732c..442c106042fa0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToBoolean.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -31,31 +28,21 @@ public class ToBoolean extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - BOOLEAN, - (fieldEval, source, driverContext) -> fieldEval, - KEYWORD, - ToBooleanFromStringEvaluator::new, - DOUBLE, - ToBooleanFromDoubleEvaluator::new, - LONG, - ToBooleanFromLongEvaluator::new, - UNSIGNED_LONG, - ToBooleanFromUnsignedLongEvaluator::new, - INTEGER, - ToBooleanFromIntEvaluator::new - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(BOOLEAN, (field, source) -> field), + Map.entry(KEYWORD, ToBooleanFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToBooleanFromDoubleEvaluator.Factory::new), + Map.entry(LONG, ToBooleanFromLongEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToBooleanFromUnsignedLongEvaluator.Factory::new), + Map.entry(INTEGER, ToBooleanFromIntEvaluator.Factory::new) + ); public ToBoolean(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java index 
eb23e460b88ff..d73cb59308be7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDatetime.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateParse; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -30,31 +27,21 @@ public class ToDatetime extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - DATETIME, - (fieldEval, source, driverContext) -> fieldEval, - LONG, - (fieldEval, source, driverContext) -> fieldEval, - KEYWORD, - ToDatetimeFromStringEvaluator::new, - DOUBLE, - ToLongFromDoubleEvaluator::new, - UNSIGNED_LONG, - ToLongFromUnsignedLongEvaluator::new, - INTEGER, - ToLongFromIntEvaluator::new // CastIntToLongEvaluator would be a candidate, but not MV'd - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(DATETIME, (field, source) -> field), + Map.entry(LONG, (field, source) -> field), + Map.entry(KEYWORD, ToDatetimeFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), + Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd + ); public ToDatetime(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java index 299e8cfe8643e..6b0d638e875a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDegrees.java @@ -7,10 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -30,39 +27,22 @@ * to degrees. 
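// [Editor's note: illustrative sketch, not part of the diff. The extraction of
// this diff dropped most generic type arguments, e.g. the conversion tables
// read "Map EVALUATORS" where the source is Map<DataType, BuildFactory>.]
// Every To* conversion function now exposes such a map keyed by source type,
// where BuildFactory is the small functional interface added to
// AbstractConvertFunction. Stand-alone model with simplified types:
import java.util.Map;

interface EvalFactory {}
record Source(String text) {}
record FromStringFactory(EvalFactory field, Source source) implements EvalFactory {}

@FunctionalInterface
interface BuildFactory {
    EvalFactory build(EvalFactory field, Source source);
}

class ToSomething {
    private static final Map<String, BuildFactory> EVALUATORS = Map.ofEntries(
        Map.entry("something", (field, source) -> field),   // identity: no conversion needed
        Map.entry("keyword", FromStringFactory::new)
    );

    static EvalFactory evaluator(String sourceType, EvalFactory fieldEval, Source source) {
        BuildFactory factory = EVALUATORS.get(sourceType);
        if (factory == null) {
            throw new IllegalArgumentException("unsupported type [" + sourceType + "]");
        }
        return factory.build(fieldEval, source);
    }
}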
*/ public class ToDegrees extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - DOUBLE, - ToDegreesEvaluator::new, - INTEGER, - (field, source, driverContext) -> new ToDegreesEvaluator( - new ToDoubleFromIntEvaluator(field, source, driverContext), - source, - driverContext - ), - LONG, - (field, source, driverContext) -> new ToDegreesEvaluator( - new ToDoubleFromLongEvaluator(field, source, driverContext), - source, - driverContext - ), + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(DOUBLE, ToDegreesEvaluator.Factory::new), + Map.entry(INTEGER, (field, source) -> new ToDegreesEvaluator.Factory(new ToDoubleFromIntEvaluator.Factory(field, source), source)), + Map.entry(LONG, (field, source) -> new ToDegreesEvaluator.Factory(new ToDoubleFromLongEvaluator.Factory(field, source), source)), + Map.entry( UNSIGNED_LONG, - (field, source, driverContext) -> new ToDegreesEvaluator( - new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), - source, - driverContext - ) - ); + (field, source) -> new ToDegreesEvaluator.Factory(new ToDoubleFromUnsignedLongEvaluator.Factory(field, source), source) + ) + ); public ToDegrees(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java index 690f7a66cbece..9972ae1d3dd81 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDouble.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -31,33 +28,22 @@ public class ToDouble extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - DOUBLE, - (fieldEval, source, driverContext) -> fieldEval, - BOOLEAN, - ToDoubleFromBooleanEvaluator::new, - DATETIME, - ToDoubleFromLongEvaluator::new, // CastLongToDoubleEvaluator would be a candidate, but not MV'd - KEYWORD, - ToDoubleFromStringEvaluator::new, - UNSIGNED_LONG, - ToDoubleFromUnsignedLongEvaluator::new, - LONG, - ToDoubleFromLongEvaluator::new, // CastLongToDoubleEvaluator would be a candidate, but not MV'd - INTEGER, - ToDoubleFromIntEvaluator::new // CastIntToDoubleEvaluator would be a candidate, but not MV'd - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(DOUBLE, (fieldEval, source) -> fieldEval), + Map.entry(BOOLEAN, ToDoubleFromBooleanEvaluator.Factory::new), + Map.entry(DATETIME, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd + Map.entry(KEYWORD, ToDoubleFromStringEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToDoubleFromUnsignedLongEvaluator.Factory::new), + 
Map.entry(LONG, ToDoubleFromLongEvaluator.Factory::new), // CastLongToDoubleEvaluator would be a candidate, but not MV'd + Map.entry(INTEGER, ToDoubleFromIntEvaluator.Factory::new) // CastIntToDoubleEvaluator would be a candidate, but not MV'd + ); public ToDouble(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java index d55b9d23975e1..07c0bfedb98c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToIP.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -26,23 +23,17 @@ public class ToIP extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - IP, - (fieldEval, source, driverContext) -> fieldEval, - KEYWORD, - ToIPFromStringEvaluator::new - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(IP, (field, source) -> field), + Map.entry(KEYWORD, ToIPFromStringEvaluator.Factory::new) + ); public ToIP(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java index 0fcf62ed3864a..3f3b492095949 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToInteger.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -32,33 +29,22 @@ public class ToInteger extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - INTEGER, - (fieldEval, source, driverContext) -> fieldEval, - BOOLEAN, - ToIntegerFromBooleanEvaluator::new, - DATETIME, - ToIntegerFromLongEvaluator::new, - KEYWORD, - ToIntegerFromStringEvaluator::new, - DOUBLE, - ToIntegerFromDoubleEvaluator::new, - UNSIGNED_LONG, - ToIntegerFromUnsignedLongEvaluator::new, - LONG, - ToIntegerFromLongEvaluator::new - ); + private static final Map 
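// [Editor's sketch, not part of the diff.] A side effect of the rewrite worth
// noting: the vertically interleaved Map.of(key, value, key, value, ...)
// tables become Map.ofEntries, which keeps each DataType visually paired with
// its factory and, unlike Map.of, is not capped at ten key-value pairs
// (ToString's table sits exactly at that limit). Illustration:
import java.util.Map;

class ConversionTables {
    // Harder to scan: keys and values alternate as positional arguments.
    static final Map<String, String> BEFORE = Map.of(
        "long", "identity",
        "keyword", "parse"
    );

    // One entry per line, key and value together.
    static final Map<String, String> AFTER = Map.ofEntries(
        Map.entry("long", "identity"),
        Map.entry("keyword", "parse")
    );
}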
EVALUATORS = Map.ofEntries( + Map.entry(INTEGER, (fieldEval, source) -> fieldEval), + Map.entry(BOOLEAN, ToIntegerFromBooleanEvaluator.Factory::new), + Map.entry(DATETIME, ToIntegerFromLongEvaluator.Factory::new), + Map.entry(KEYWORD, ToIntegerFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToIntegerFromDoubleEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToIntegerFromUnsignedLongEvaluator.Factory::new), + Map.entry(LONG, ToIntegerFromLongEvaluator.Factory::new) + ); public ToInteger(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java index 8e50dd8540ffd..e7f60abc6c3d4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToLong.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -33,33 +30,22 @@ public class ToLong extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - LONG, - (fieldEval, source, driverContext) -> fieldEval, - DATETIME, - (fieldEval, source, driverContext) -> fieldEval, - BOOLEAN, - ToLongFromBooleanEvaluator::new, - KEYWORD, - ToLongFromStringEvaluator::new, - DOUBLE, - ToLongFromDoubleEvaluator::new, - UNSIGNED_LONG, - ToLongFromUnsignedLongEvaluator::new, - INTEGER, - ToLongFromIntEvaluator::new // CastIntToLongEvaluator would be a candidate, but not MV'd - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(LONG, (fieldEval, source) -> fieldEval), + Map.entry(DATETIME, (fieldEval, source) -> fieldEval), + Map.entry(BOOLEAN, ToLongFromBooleanEvaluator.Factory::new), + Map.entry(KEYWORD, ToLongFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToLongFromDoubleEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToLongFromUnsignedLongEvaluator.Factory::new), + Map.entry(INTEGER, ToLongFromIntEvaluator.Factory::new) // CastIntToLongEvaluator would be a candidate, but not MV'd + ); public ToLong(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java index 8bb5180e09752..9f39015a8e063 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToRadians.java @@ -7,10 +7,7 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.convert; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -30,39 +27,22 @@ * to radians. */ public class ToRadians extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - DOUBLE, - ToRadiansEvaluator::new, - INTEGER, - (field, source, driverContext) -> new ToRadiansEvaluator( - new ToDoubleFromIntEvaluator(field, source, driverContext), - source, - driverContext - ), - LONG, - (field, source, driverContext) -> new ToRadiansEvaluator( - new ToDoubleFromLongEvaluator(field, source, driverContext), - source, - driverContext - ), + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(DOUBLE, ToRadiansEvaluator.Factory::new), + Map.entry(INTEGER, (field, source) -> new ToRadiansEvaluator.Factory(new ToDoubleFromIntEvaluator.Factory(field, source), source)), + Map.entry(LONG, (field, source) -> new ToRadiansEvaluator.Factory(new ToDoubleFromLongEvaluator.Factory(field, source), source)), + Map.entry( UNSIGNED_LONG, - (field, source, driverContext) -> new ToRadiansEvaluator( - new ToDoubleFromUnsignedLongEvaluator(field, source, driverContext), - source, - driverContext - ) - ); + (field, source) -> new ToRadiansEvaluator.Factory(new ToDoubleFromUnsignedLongEvaluator.Factory(field, source), source) + ) + ); public ToRadians(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java index 89d1b43ace0dd..98118162e742d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToString.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -39,30 +36,18 @@ public class ToString extends AbstractConvertFunction implements EvaluatorMapper { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - KEYWORD, - (fieldEval, source, driverContext) -> fieldEval, - BOOLEAN, - ToStringFromBooleanEvaluator::new, - DATETIME, - ToStringFromDatetimeEvaluator::new, - IP, - ToStringFromIPEvaluator::new, - DOUBLE, - ToStringFromDoubleEvaluator::new, - LONG, - ToStringFromLongEvaluator::new, - INTEGER, - ToStringFromIntEvaluator::new, - TEXT, - (fieldEval, source, driverContext) -> fieldEval, - VERSION, - ToStringFromVersionEvaluator::new, - 
UNSIGNED_LONG, - ToStringFromUnsignedLongEvaluator::new - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(KEYWORD, (fieldEval, source) -> fieldEval), + Map.entry(BOOLEAN, ToStringFromBooleanEvaluator.Factory::new), + Map.entry(DATETIME, ToStringFromDatetimeEvaluator.Factory::new), + Map.entry(IP, ToStringFromIPEvaluator.Factory::new), + Map.entry(DOUBLE, ToStringFromDoubleEvaluator.Factory::new), + Map.entry(LONG, ToStringFromLongEvaluator.Factory::new), + Map.entry(INTEGER, ToStringFromIntEvaluator.Factory::new), + Map.entry(TEXT, (fieldEval, source) -> fieldEval), + Map.entry(VERSION, ToStringFromVersionEvaluator.Factory::new), + Map.entry(UNSIGNED_LONG, ToStringFromUnsignedLongEvaluator.Factory::new) + ); public ToString( Source source, @@ -75,9 +60,7 @@ public ToString( } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java index 396aa03f39dc6..be96fdb7139d1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToUnsignedLong.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -34,33 +31,22 @@ public class ToUnsignedLong extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.of( - UNSIGNED_LONG, - (fieldEval, source, driverContext) -> fieldEval, - DATETIME, - ToUnsignedLongFromLongEvaluator::new, - BOOLEAN, - ToUnsignedLongFromBooleanEvaluator::new, - KEYWORD, - ToUnsignedLongFromStringEvaluator::new, - DOUBLE, - ToUnsignedLongFromDoubleEvaluator::new, - LONG, - ToUnsignedLongFromLongEvaluator::new, - INTEGER, - ToUnsignedLongFromIntEvaluator::new - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(UNSIGNED_LONG, (fieldEval, source) -> fieldEval), + Map.entry(DATETIME, ToUnsignedLongFromLongEvaluator.Factory::new), + Map.entry(BOOLEAN, ToUnsignedLongFromBooleanEvaluator.Factory::new), + Map.entry(KEYWORD, ToUnsignedLongFromStringEvaluator.Factory::new), + Map.entry(DOUBLE, ToUnsignedLongFromDoubleEvaluator.Factory::new), + Map.entry(LONG, ToUnsignedLongFromLongEvaluator.Factory::new), + Map.entry(INTEGER, ToUnsignedLongFromIntEvaluator.Factory::new) + ); public ToUnsignedLong(Source source, Expression field) { super(source, field); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java index d652792ea9819..ad7712f33d947 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToVersion.java @@ -8,10 +8,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.convert; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.ConvertEvaluator; -import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -28,22 +25,18 @@ public class ToVersion extends AbstractConvertFunction { - private static final Map< - DataType, - TriFunction> EVALUATORS = Map.ofEntries( - Map.entry(VERSION, (fieldEval, source, driverContext) -> fieldEval), - Map.entry(KEYWORD, ToVersionFromStringEvaluator::new), - Map.entry(TEXT, ToVersionFromStringEvaluator::new) - ); + private static final Map EVALUATORS = Map.ofEntries( + Map.entry(VERSION, (fieldEval, source) -> fieldEval), + Map.entry(KEYWORD, ToVersionFromStringEvaluator.Factory::new), + Map.entry(TEXT, ToVersionFromStringEvaluator.Factory::new) + ); public ToVersion(Source source, @Param(name = "v", type = { "keyword", "text", "version" }) Expression v) { super(source, v); } @Override - protected - Map> - evaluators() { + protected Map factories() { return EVALUATORS; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java index 455c9d162dc8a..c7c923e8e912a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/BinaryDateTimeFunction.java @@ -17,9 +17,6 @@ import java.time.ZoneId; import java.time.ZoneOffset; import java.util.Objects; -import java.util.function.Predicate; - -import static org.elasticsearch.common.logging.LoggerMessageFormat.format; public abstract class BinaryDateTimeFunction extends BinaryScalarFunction { @@ -69,12 +66,4 @@ public boolean equals(Object o) { BinaryDateTimeFunction that = (BinaryDateTimeFunction) o; return zoneId().equals(that.zoneId()); } - - // TODO: drop check once 8.11 is released - static TypeResolution argumentTypesAreSwapped(DataType left, DataType right, Predicate rightTest, String source) { - if (DataTypes.isDateTime(left) && rightTest.test(right)) { - return new TypeResolution(format(null, "function definition has been updated, please swap arguments in [{}]", source)); - } - return TypeResolution.TYPE_RESOLVED; - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java index 1b33d5829e472..96d78474bbb9e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateExtract.java @@ -31,7 +31,6 @@ import java.util.Locale; import java.util.function.Function; -import 
static org.elasticsearch.xpack.esql.expression.function.scalar.date.BinaryDateTimeFunction.argumentTypesAreSwapped; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isStringAndExact; @@ -52,16 +51,10 @@ public ExpressionEvaluator.Factory toEvaluator(Function new DateExtractConstantEvaluator(fieldEvaluator.get(dvrCtx), chrono, configuration().zoneId(), dvrCtx); + return new DateExtractConstantEvaluator.Factory(fieldEvaluator, chrono, configuration().zoneId()); } var chronoEvaluator = toEvaluator.apply(children().get(0)); - return dvrCtx -> new DateExtractEvaluator( - source(), - fieldEvaluator.get(dvrCtx), - chronoEvaluator.get(dvrCtx), - configuration().zoneId(), - dvrCtx - ); + return new DateExtractEvaluator.Factory(source(), fieldEvaluator, chronoEvaluator, configuration().zoneId()); } private ChronoField chronoField() { @@ -115,25 +108,9 @@ protected TypeResolution resolveType() { if (childrenResolved() == false) { return new TypeResolution("Unresolved children"); } - TypeResolution resolution = argumentTypesAreSwapped( - children().get(0).dataType(), - children().get(1).dataType(), - DataTypes::isString, - sourceText() + return isStringAndExact(children().get(0), sourceText(), TypeResolutions.ParamOrdinal.FIRST).and( + isDate(children().get(1), sourceText(), TypeResolutions.ParamOrdinal.SECOND) ); - if (resolution.unresolved()) { - return resolution; - } - resolution = isStringAndExact(children().get(0), sourceText(), TypeResolutions.ParamOrdinal.FIRST); - if (resolution.unresolved()) { - return resolution; - } - resolution = isDate(children().get(1), sourceText(), TypeResolutions.ParamOrdinal.SECOND); - if (resolution.unresolved()) { - return resolution; - } - - return TypeResolution.TYPE_RESOLVED; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java index 0ec2aae8306db..e7ae1f8d4aeca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateFormat.java @@ -28,7 +28,6 @@ import java.util.Locale; import java.util.function.Function; -import static org.elasticsearch.xpack.esql.expression.function.scalar.date.BinaryDateTimeFunction.argumentTypesAreSwapped; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isDate; @@ -57,15 +56,7 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - TypeResolution resolution; - if (format != null) { - resolution = argumentTypesAreSwapped(format.dataType(), field.dataType(), DataTypes::isString, sourceText()); - if (resolution.unresolved()) { - return resolution; - } - } - - resolution = isDate(field, sourceText(), format == null ? FIRST : SECOND); + TypeResolution resolution = isDate(field, sourceText(), format == null ? 
FIRST : SECOND); if (resolution.unresolved()) { return resolution; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java index 60985b80986c4..4e014690288f6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateParse.java @@ -104,7 +104,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new DateParseConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), DEFAULT_FORMATTER, dvrCtx); + return new DateParseConstantEvaluator.Factory(source(), fieldEvaluator, DEFAULT_FORMATTER); } if (format.dataType() != DataTypes.KEYWORD) { throw new IllegalArgumentException("unsupported data type for date_parse [" + format.dataType() + "]"); @@ -112,13 +112,13 @@ public ExpressionEvaluator.Factory toEvaluator(Function new DateParseConstantEvaluator(source(), fieldEvaluator.get(dvrCtx), formatter, dvrCtx); + return new DateParseConstantEvaluator.Factory(source(), fieldEvaluator, formatter); } catch (IllegalArgumentException e) { throw new EsqlIllegalArgumentException(e, "invalid date pattern for [{}]: {}", sourceText(), e.getMessage()); } } ExpressionEvaluator.Factory formatEvaluator = toEvaluator.apply(format); - return dvrCtx -> new DateParseEvaluator(source(), fieldEvaluator.get(dvrCtx), formatEvaluator.get(dvrCtx), zone, dvrCtx); + return new DateParseEvaluator.Factory(source(), fieldEvaluator, formatEvaluator, zone); } private static DateFormatter toFormatter(Object format, ZoneId zone) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java index 55885bf514fe2..0c70c9065dfc4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateTrunc.java @@ -42,22 +42,9 @@ protected TypeResolution resolveType() { return new TypeResolution("Unresolved children"); } - TypeResolution resolution = argumentTypesAreSwapped( - left().dataType(), - right().dataType(), - EsqlDataTypes::isTemporalAmount, - sourceText() + return isDate(timestampField(), sourceText(), FIRST).and( + isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration") ); - if (resolution.unresolved()) { - return resolution; - } - - resolution = isDate(timestampField(), sourceText(), FIRST); - if (resolution.unresolved()) { - return resolution; - } - - return isType(interval(), EsqlDataTypes::isTemporalAmount, sourceText(), SECOND, "dateperiod", "timeduration"); } @Override @@ -166,6 +153,6 @@ public ExpressionEvaluator.Factory toEvaluator(Function new DateTruncEvaluator(fieldEvaluator.get(dvrCtx), rounding, dvrCtx); + return new DateTruncEvaluator.Factory(fieldEvaluator, rounding); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java index ea4fd3820319b..90766a95e9cc0 100644 --- 
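// [Editor's sketch, not part of the diff.] The resolveType() rewrites in
// DateExtract, DateFormat, and DateTrunc above replace "check, return early
// if unresolved, check again" ladders with one chained expression. That works
// because TypeResolution.and(...) (used directly in the diff) behaves as a
// short-circuiting combinator: keep the first failure, otherwise take the
// next resolution. Minimal reimplementation of that apparent semantics:
record Resolution(String failure) {
    static final Resolution RESOLVED = new Resolution(null);

    boolean unresolved() { return failure != null; }

    Resolution and(Resolution other) {
        return unresolved() ? this : other;   // first failure wins
    }
}

class DateTruncChecks {
    static Resolution resolveType(Resolution isDate, Resolution isInterval) {
        // Before: two if-blocks returning early; after: one expression.
        return isDate.and(isInterval);
    }
}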
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Abs.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.Expression; @@ -22,6 +23,7 @@ import java.util.function.Function; public class Abs extends UnaryScalarFunction implements EvaluatorMapper { + @FunctionInfo(returnType = { "integer", "long", "double", "unsigned_long" }) public Abs(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @@ -50,16 +52,16 @@ static int process(int fieldVal) { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); if (dataType() == DataTypes.DOUBLE) { - return dvrCtx -> new AbsDoubleEvaluator(field.get(dvrCtx), dvrCtx); + return new AbsDoubleEvaluator.Factory(field); } if (dataType() == DataTypes.UNSIGNED_LONG) { return field; } if (dataType() == DataTypes.LONG) { - return dvrCtx -> new AbsLongEvaluator(field.get(dvrCtx), dvrCtx); + return new AbsLongEvaluator.Factory(field); } if (dataType() == DataTypes.INTEGER) { - return dvrCtx -> new AbsIntEvaluator(field.get(dvrCtx), dvrCtx); + return new AbsIntEvaluator.Factory(field); } throw EsqlIllegalArgumentException.illegalDataType(dataType()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java index 400118a1f7edf..08a842e8b9fd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AbstractTrigonometricFunction.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -30,12 +29,11 @@ abstract class AbstractTrigonometricFunction extends UnaryScalarFunction impleme super(source, field); } - protected abstract EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrDtx); + protected abstract EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field); @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var fieldEval = Cast.cast(field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field())); - return dvrCtx -> doubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + return doubleEvaluator(Cast.cast(field().dataType(), DataTypes.DOUBLE, toEvaluator.apply(field()))); } @Override diff --git 
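// [Editor's sketch, not part of the diff.] AbstractTrigonometricFunction's
// doubleEvaluator hook now maps factory to factory, so the cast-to-double
// wrapper is composed once at plan time rather than inside a lambda capturing
// the DriverContext. Simplified model (CastToDoubleFactory is a hypothetical
// stand-in for what Cast.cast produces):
import java.util.function.UnaryOperator;

interface EvalFactory {}
record CastToDoubleFactory(EvalFactory field) implements EvalFactory {}
record AtanFactory(EvalFactory field) implements EvalFactory {}

abstract class TrigFunction {
    // Subclasses return a factory; no DriverContext parameter any more.
    protected abstract EvalFactory doubleEvaluator(EvalFactory field);

    EvalFactory toEvaluator(EvalFactory fieldFactory, UnaryOperator<EvalFactory> castToDouble) {
        return doubleEvaluator(castToDouble.apply(fieldFactory));
    }
}

class Atan extends TrigFunction {
    @Override
    protected EvalFactory doubleEvaluator(EvalFactory field) {
        return new AtanFactory(field);
    }
}

// Usage sketch: new Atan().toEvaluator(fieldFactory, CastToDoubleFactory::new)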
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java index 8484af1ff738e..5df73102a5ee6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Acos.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Inverse cosine trigonometric function. */ public class Acos extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Acos(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new AcosEvaluator(source(), field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new AcosEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java index 7a87bca7d942c..66d35d8e8bb2c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Asin.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Inverse sine trigonometric function.
*/ public class Asin extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Asin(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new AsinEvaluator(source(), field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new AsinEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java index 02631e3f39f31..f730b3358a7f1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Inverse tangent trigonometric function. */ public class Atan extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Atan(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new AtanEvaluator(field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new AtanEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java index c861bb30ef3f6..31fdea6e0d00c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Atan2.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; @@ -33,6 +34,7 @@ public class Atan2 extends ScalarFunction implements EvaluatorMapper { private final Expression y; private final Expression x; + @FunctionInfo(returnType = "double") public Atan2( Source source, @Param(name = "y", type = { "integer", "long", "double", "unsigned_long" }) Expression y, @@ -85,7 +87,7 @@ public boolean foldable() { public
ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var yEval = Cast.cast(y.dataType(), DataTypes.DOUBLE, toEvaluator.apply(y)); var xEval = Cast.cast(x.dataType(), DataTypes.DOUBLE, toEvaluator.apply(x)); - return dvrCtx -> new Atan2Evaluator(yEval.get(dvrCtx), xEval.get(dvrCtx), dvrCtx); + return new Atan2Evaluator.Factory(yEval, xEval); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java index 3aaca5f53e8d0..33115352d9e54 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucket.java @@ -14,6 +14,8 @@ import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.date.DateTrunc; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; @@ -81,7 +83,14 @@ public class AutoBucket extends ScalarFunction implements EvaluatorMapper { private final Expression from; private final Expression to; - public AutoBucket(Source source, Expression field, Expression buckets, Expression from, Expression to) { + @FunctionInfo(returnType = { "double", "date" }) + public AutoBucket( + Source source, + @Param(name = "field", type = { "integer", "long", "double", "date" }) Expression field, + @Param(name = "buckets", type = { "integer" }) Expression buckets, + @Param(name = "from", type = { "integer", "long", "double", "date" }) Expression from, + @Param(name = "to", type = { "integer", "long", "double", "date" }) Expression to + ) { super(source, List.of(field, buckets, from, to)); this.field = field; this.buckets = buckets; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java index 4179b7d0f750d..91e4bbf5eae94 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cast.java @@ -26,31 +26,31 @@ public static ExpressionEvaluator.Factory cast(DataType current, DataType requir return in; } if (current == DataTypes.NULL || required == DataTypes.NULL) { - return dvrCtx -> EvalOperator.CONSTANT_NULL; + return EvalOperator.CONSTANT_NULL_FACTORY; } if (required == DataTypes.DOUBLE) { if (current == DataTypes.LONG) { - return dvrCtx -> new CastLongToDoubleEvaluator(in.get(dvrCtx), dvrCtx); + return new CastLongToDoubleEvaluator.Factory(in); } if (current == DataTypes.INTEGER) { - return dvrCtx -> new CastIntToDoubleEvaluator(in.get(dvrCtx), dvrCtx); + return new CastIntToDoubleEvaluator.Factory(in); } if (current == DataTypes.UNSIGNED_LONG) { - return dvrCtx -> new CastUnsignedLongToDoubleEvaluator(in.get(dvrCtx), dvrCtx); + return new CastUnsignedLongToDoubleEvaluator.Factory(in); } throw 
cantCast(current, required); } if (required == DataTypes.UNSIGNED_LONG) { if (current == DataTypes.LONG) { - return dvrCtx -> new CastLongToUnsignedLongEvaluator(in.get(dvrCtx), dvrCtx); + return new CastLongToUnsignedLongEvaluator.Factory(in); } if (current == DataTypes.INTEGER) { - return dvrCtx -> new CastIntToUnsignedLongEvaluator(in.get(dvrCtx), dvrCtx); + return new CastIntToUnsignedLongEvaluator.Factory(in); } } if (required == DataTypes.LONG) { if (current == DataTypes.INTEGER) { - return dvrCtx -> new CastIntToLongEvaluator(in.get(dvrCtx), dvrCtx); + return new CastIntToLongEvaluator.Factory(in); } throw cantCast(current, required); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java index 2569e4fae6035..d9b9089795103 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Ceil.java @@ -40,7 +40,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new CeilDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + return new CeilDoubleEvaluator.Factory(fieldEval); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java index 327f99fbebd13..dab1fa65d05dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cos.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Cosine trigonometric function. 
*/ public class Cos extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Cos(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new CosEvaluator(field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new CosEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java index bbf1a77182c54..cc315c3e9569c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Cosh.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Cosine hyperbolic function. */ public class Cosh extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Cosh(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new CoshEvaluator(source(), field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new CoshEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java index f30bf79a74fbb..98e72b7dedb61 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Floor.java @@ -39,8 +39,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function new FloorDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); + return new FloorDoubleEvaluator.Factory(toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java index 4d73516f2399c..19d080647b374 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsFinite.java @@ -24,7 +24,7 @@ public IsFinite(Source source, Expression field) { @Override public ExpressionEvaluator.Factory 
toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return dvrCtx -> new IsFiniteEvaluator(field.get(dvrCtx), dvrCtx); + return new IsFiniteEvaluator.Factory(field); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java index 0fb65d14eee04..70e8137d8871e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsInfinite.java @@ -23,8 +23,7 @@ public IsInfinite(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var field = toEvaluator.apply(field()); - return dvrCtx -> new IsInfiniteEvaluator(field.get(dvrCtx), dvrCtx); + return new IsInfiniteEvaluator.Factory(toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java index 1b44158d9e8fc..4db5534631fc9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/IsNaN.java @@ -23,8 +23,7 @@ public IsNaN(Source source, Expression field) { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var field = toEvaluator.apply(field()); - return dvrCtx -> new IsNaNEvaluator(field.get(dvrCtx), dvrCtx); + return new IsNaNEvaluator.Factory(toEvaluator.apply(field())); } @Evaluator diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java index 89bcc40d59ff4..84bc9d19b409e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Log10.java @@ -37,16 +37,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function new Log10DoubleEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new Log10DoubleEvaluator.Factory(source(), field); } if (fieldType == DataTypes.INTEGER) { - return dvrCtx -> new Log10IntEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new Log10IntEvaluator.Factory(source(), field); } if (fieldType == DataTypes.LONG) { - return dvrCtx -> new Log10LongEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new Log10LongEvaluator.Factory(source(), field); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return dvrCtx -> new Log10UnsignedLongEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new Log10UnsignedLongEvaluator.Factory(source(), field); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java index 0049d02a74b2c..48db81fefbc98 100644 --- 
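The Cast.cast helper a little further up follows the same conversion with two extra wrinkles: if the types already match, the input factory is returned untouched, and a NULL on either side short-circuits to a shared constant-null factory. A self-contained sketch of that control flow, with simplified stand-in types (the real code returns generated factories such as CastIntToDoubleEvaluator.Factory):

    import java.util.Arrays;

    // Stand-in sketch of the Cast.cast structure: identity when types match,
    // a shared null factory for null types, a wrapping conversion otherwise.
    class CastSketch {
        enum DataType { INTEGER, LONG, DOUBLE, NULL }

        interface DriverContext {}

        interface Evaluator {
            Number[] eval(); // simplified; real evaluators consume Pages
        }

        interface Factory {
            Evaluator get(DriverContext context);
        }

        // Simplified stand-in for the shared constant-null factory.
        static final Factory CONSTANT_NULL_FACTORY = context -> () -> new Number[] { null };

        static Factory cast(DataType current, DataType required, Factory in) {
            if (current == required) {
                return in; // no conversion needed: reuse the factory as-is
            }
            if (current == DataType.NULL || required == DataType.NULL) {
                return CONSTANT_NULL_FACTORY; // nulls stay null, whatever the target
            }
            if (required == DataType.DOUBLE) {
                // wrap the input factory in a widening conversion
                return context -> {
                    Evaluator input = in.get(context);
                    return () -> Arrays.stream(input.eval())
                        .map(v -> v == null ? null : (Number) v.doubleValue())
                        .toArray(Number[]::new);
                };
            }
            throw new IllegalArgumentException("cannot cast " + current + " to " + required);
        }
    }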
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Pow.java @@ -172,25 +172,22 @@ public ExpressionEvaluator.Factory toEvaluator(Function new PowDoubleEvaluator( + return new PowDoubleEvaluator.Factory( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), - dvrCtx + cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator) ); } else if (dataType == DataTypes.LONG) { - return dvrCtx -> new PowLongEvaluator( + return new PowLongEvaluator.Factory( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), - dvrCtx + cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator) ); } else { - return dvrCtx -> new PowIntEvaluator( + return new PowIntEvaluator.Factory( source(), - cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator).get(dvrCtx), - cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator).get(dvrCtx), - dvrCtx + cast(base.dataType(), DataTypes.DOUBLE, baseEvaluator), + cast(exponent.dataType(), DataTypes.DOUBLE, exponentEvaluator) ); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index 7736148ea8f9b..4e1d12606a34f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -7,9 +7,7 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; -import org.elasticsearch.common.TriFunction; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -136,35 +134,31 @@ public ScriptTemplate asScript() { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { DataType fieldType = dataType(); if (fieldType == DataTypes.DOUBLE) { - return toEvaluator(toEvaluator, RoundDoubleNoDecimalsEvaluator::new, RoundDoubleEvaluator::new); + return toEvaluator(toEvaluator, RoundDoubleNoDecimalsEvaluator.Factory::new, RoundDoubleEvaluator.Factory::new); } if (fieldType == DataTypes.INTEGER) { - return toEvaluator(toEvaluator, identity(), RoundIntEvaluator::new); + return toEvaluator(toEvaluator, Function.identity(), RoundIntEvaluator.Factory::new); } if (fieldType == DataTypes.LONG) { - return toEvaluator(toEvaluator, identity(), RoundLongEvaluator::new); + return toEvaluator(toEvaluator, Function.identity(), RoundLongEvaluator.Factory::new); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return toEvaluator(toEvaluator, identity(), RoundUnsignedLongEvaluator::new); + return toEvaluator(toEvaluator, Function.identity(), RoundUnsignedLongEvaluator.Factory::new); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); } - private static BiFunction identity() { - return (t, u) -> t; - } - 
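Round, just above, keeps its private dispatch helper but retypes it: the old TriFunction over evaluators plus a hand-rolled identity() shim becomes plain Function/BiFunction over factories, so Function.identity() and the generated ...Evaluator.Factory::new constructor references slot straight in. A compact, self-contained sketch of that dispatch (hypothetical factory names):

    import java.util.function.BiFunction;
    import java.util.function.Function;

    // Hypothetical factory types standing in for the generated Round factories.
    class RoundDispatchSketch {
        interface Factory {}

        record NoDecimalsFactory(Factory field) implements Factory {}
        record WithDecimalsFactory(Factory field, Factory decimals) implements Factory {}

        // decimals == null means ROUND(x); otherwise ROUND(x, d).
        static Factory build(
            Factory field,
            Factory decimals,
            Function<Factory, Factory> noDecimals,
            BiFunction<Factory, Factory, Factory> withDecimals
        ) {
            return decimals == null ? noDecimals.apply(field) : withDecimals.apply(field, decimals);
        }

        static Factory integerExample(Factory field, Factory decimals) {
            // Rounding an integer without decimals is a no-op, so Function.identity()
            // replaces the old hand-rolled identity() helper; doubles would pass
            // NoDecimalsFactory::new instead.
            return build(field, decimals, Function.identity(), WithDecimalsFactory::new);
        }
    }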
private ExpressionEvaluator.Factory toEvaluator( Function toEvaluator, - BiFunction noDecimals, - TriFunction withDecimals + Function noDecimals, + BiFunction withDecimals ) { var fieldEvaluator = toEvaluator.apply(field()); if (decimals == null) { - return dvrCtx -> noDecimals.apply(fieldEvaluator.get(dvrCtx), dvrCtx); + return noDecimals.apply(fieldEvaluator); } var decimalsEvaluator = Cast.cast(decimals().dataType(), DataTypes.LONG, toEvaluator.apply(decimals())); - return dvrCtx -> withDecimals.apply(fieldEvaluator.get(dvrCtx), decimalsEvaluator.get(dvrCtx), dvrCtx); + return withDecimals.apply(fieldEvaluator, decimalsEvaluator); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java index 4be75f853b91f..eaf632ee8c40e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sin.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; @@ -32,8 +31,8 @@ public Sin( } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new SinEvaluator(field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new SinEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java index d6f62a52704b6..07228dd1743dd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sinh.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Sine hyperbolic function. 
*/ public class Sinh extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Sinh(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new SinhEvaluator(source(), field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new SinhEvaluator.Factory(source(), field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java index 29888db439dcf..dc9e3bc2b3fde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Sqrt.java @@ -37,16 +37,16 @@ public ExpressionEvaluator.Factory toEvaluator(Function new SqrtDoubleEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new SqrtDoubleEvaluator.Factory(source(), field); } if (fieldType == DataTypes.INTEGER) { - return dvrCtx -> new SqrtIntEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new SqrtIntEvaluator.Factory(source(), field); } if (fieldType == DataTypes.LONG) { - return dvrCtx -> new SqrtLongEvaluator(source(), field.get(dvrCtx), dvrCtx); + return new SqrtLongEvaluator.Factory(source(), field); } if (fieldType == DataTypes.UNSIGNED_LONG) { - return dvrCtx -> new SqrtUnsignedLongEvaluator(field.get(dvrCtx), dvrCtx); + return new SqrtUnsignedLongEvaluator.Factory(field); } throw EsqlIllegalArgumentException.illegalDataType(fieldType); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java index bffdaf440e40e..7980c2dd94cb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tan.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Tangent trigonometric function. 
*/ public class Tan extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Tan(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new TanEvaluator(field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new TanEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java index 1079ca5e7f914..4d0af5b6a8de9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Tanh.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.math; import org.elasticsearch.compute.ann.Evaluator; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -21,13 +21,14 @@ * Tangent hyperbolic function. */ public class Tanh extends AbstractTrigonometricFunction { + @FunctionInfo(returnType = "double") public Tanh(Source source, @Param(name = "n", type = { "integer", "long", "double", "unsigned_long" }) Expression n) { super(source, n); } @Override - protected EvalOperator.ExpressionEvaluator doubleEvaluator(EvalOperator.ExpressionEvaluator field, DriverContext dvrCtx) { - return new TanhEvaluator(field, dvrCtx); + protected EvalOperator.ExpressionEvaluator.Factory doubleEvaluator(EvalOperator.ExpressionEvaluator.Factory field) { + return new TanhEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java index 78344f0ae51b8..196137336bee5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunction.java @@ -118,8 +118,8 @@ protected Block.Ref evalSingleValuedNullable(Block.Ref fieldRef) { @Override public Block.Ref eval(Page page) { - Block.Ref fieldRef = field.eval(page); - return fieldRef.block().mayHaveMultivaluedFields() ? evalNullable(fieldRef) : evalSingleValuedNullable(fieldRef); + Block.Ref ref = field.eval(page); + return ref.block().mayHaveMultivaluedFields() ? 
evalNullable(ref) : evalSingleValuedNullable(ref); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java index 7930af6b25d8c..0a6a5d50ee552 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvg.java @@ -46,12 +46,12 @@ public DataType dataType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> dvrCtx -> new MvAvgDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case INT -> dvrCtx -> new MvAvgIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case DOUBLE -> new MvAvgDoubleEvaluator.Factory(fieldEval); + case INT -> new MvAvgIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? dvrCtx -> new MvAvgUnsignedLongEvaluator(fieldEval.get(dvrCtx), dvrCtx) - : dvrCtx -> new MvAvgLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; + ? new MvAvgUnsignedLongEvaluator.Factory(fieldEval) + : new MvAvgLongEvaluator.Factory(fieldEval); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java index 1d9108e4ba096..a187bb41ee235 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcat.java @@ -13,9 +13,10 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.TypeResolutions; import org.elasticsearch.xpack.ql.expression.function.scalar.BinaryScalarFunction; @@ -32,7 +33,15 @@ * Reduce a multivalued string field to a single valued field by concatenating all values. */ public class MvConcat extends BinaryScalarFunction implements EvaluatorMapper { - public MvConcat(Source source, Expression field, Expression delim) { + @FunctionInfo( + returnType = "keyword", + description = "Reduce a multivalued string field to a single valued field by concatenating all values." 
+ ) + public MvConcat( + Source source, + @Param(name = "v", type = { "text", "keyword" }, description = "values to join") Expression field, + @Param(name = "delim", type = { "text", "keyword" }, description = "delimiter") Expression delim + ) { super(source, field, delim); } @@ -57,9 +66,7 @@ public DataType dataType() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var fieldEval = toEvaluator.apply(left()); - var delimEval = toEvaluator.apply(right()); - return dvrCtx -> new MvConcatEvaluator(dvrCtx, fieldEval.get(dvrCtx), delimEval.get(dvrCtx)); + return new EvaluatorFactory(toEvaluator.apply(left()), toEvaluator.apply(right())); } @Override @@ -77,6 +84,20 @@ protected NodeInfo info() { return NodeInfo.create(this, MvConcat::new, left(), right()); } + private record EvaluatorFactory(ExpressionEvaluator.Factory field, ExpressionEvaluator.Factory delim) + implements + ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Evaluator(context, field.get(context), delim.get(context)); + } + + @Override + public String toString() { + return "MvConcat[field=" + field + ", delim=" + delim + "]"; + } + } + /** * Evaluator for {@link MvConcat}. Not generated and doesn't extend from * {@link AbstractMultivalueFunction.AbstractEvaluator} because it's just @@ -87,12 +108,12 @@ protected NodeInfo info() { *
<ul> * <li>The actual joining process needs init step per row - {@link BytesRefBuilder#clear()}</li>
* </ul> */ - private class MvConcatEvaluator implements EvalOperator.ExpressionEvaluator { private final DriverContext context; - private final EvalOperator.ExpressionEvaluator field; - private final EvalOperator.ExpressionEvaluator delim; + private static class Evaluator implements ExpressionEvaluator { private final DriverContext context; + private final ExpressionEvaluator field; + private final ExpressionEvaluator delim; - MvConcatEvaluator(DriverContext context, EvalOperator.ExpressionEvaluator field, EvalOperator.ExpressionEvaluator delim) { + Evaluator(DriverContext context, ExpressionEvaluator field, ExpressionEvaluator delim) { this.context = context; this.field = field; this.delim = delim; @@ -101,9 +122,6 @@ private class MvConcatEvaluator implements EvalOperator.ExpressionEvaluator { @Override public final Block.Ref eval(Page page) { try (Block.Ref fieldRef = field.eval(page); Block.Ref delimRef = delim.eval(page)) { - if (fieldRef.block().areAllValuesNull() || delimRef.block().areAllValuesNull()) { - return Block.Ref.floating(Block.constantNullBlock(page.getPositionCount(), context.blockFactory())); - } BytesRefBlock fieldVal = (BytesRefBlock) fieldRef.block(); BytesRefBlock delimVal = (BytesRefBlock) delimRef.block(); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java index 8520366ed82ce..528c0b6d5f0cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; @@ -25,11 +27,21 @@ import static org.elasticsearch.xpack.ql.expression.TypeResolutions.isType; /** - * Reduce a multivalued field to a single valued field containing the minimum value. + * Reduce a multivalued field to a single valued field containing the count of values. */ public class MvCount extends AbstractMultivalueFunction { - public MvCount(Source source, Expression field) { - super(source, field); + @FunctionInfo( + returnType = "integer", + description = "Reduce a multivalued field to a single valued field containing the count of values."
+ ) + public MvCount( + Source source, + @Param( + name = "v", + type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + ) Expression v + ) { + super(source, v); } @Override @@ -44,7 +56,7 @@ public DataType dataType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { - return dvrCtx -> new Evaluator(dvrCtx, fieldEval.get(dvrCtx)); + return new EvaluatorFactory(fieldEval); } @Override @@ -57,6 +69,18 @@ protected NodeInfo info() { return NodeInfo.create(this, MvCount::new, field()); } + private record EvaluatorFactory(ExpressionEvaluator.Factory field) implements ExpressionEvaluator.Factory { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new Evaluator(context, field.get(context)); + } + + @Override + public String toString() { + return "MvCount[field=" + field + ']'; + } + } + private static class Evaluator extends AbstractEvaluator { private final DriverContext driverContext; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index cec9da98d96a3..bda8faa62f7af 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.compute.operator.MultivalueDedupe; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; @@ -23,7 +25,14 @@ * Removes duplicate values from a multivalued field. 
*/ public class MvDedupe extends AbstractMultivalueFunction { - public MvDedupe(Source source, Expression field) { + @FunctionInfo(returnType = "?", description = "Remove duplicate values from a multivalued field.") + public MvDedupe( + Source source, + @Param( + name = "v", + type = { "boolean", "date", "double", "ip", "text", "integer", "keyword", "version", "long" } // TODO add unsigned_long + ) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java index 5f527beef4967..cee1d533b4332 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMax.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; @@ -26,8 +28,15 @@ * Reduce a multivalued field to a single valued field containing the maximum value. */ public class MvMax extends AbstractMultivalueFunction { - public MvMax(Source source, Expression field) { - super(source, field); + @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the maximum value.") + public MvMax( + Source source, + @Param( + name = "v", + type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + ) Expression v + ) { + super(source, v); } @Override @@ -38,12 +47,12 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case BOOLEAN -> dvrCtx -> new MvMaxBooleanEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case BYTES_REF -> dvrCtx -> new MvMaxBytesRefEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case DOUBLE -> dvrCtx -> new MvMaxDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case INT -> dvrCtx -> new MvMaxIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case LONG -> dvrCtx -> new MvMaxLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; + case BOOLEAN -> new MvMaxBooleanEvaluator.Factory(fieldEval); + case BYTES_REF -> new MvMaxBytesRefEvaluator.Factory(fieldEval); + case DOUBLE -> new MvMaxDoubleEvaluator.Factory(fieldEval); + case INT -> new MvMaxIntEvaluator.Factory(fieldEval); + case LONG -> new MvMaxLongEvaluator.Factory(fieldEval); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java index edd68b1a45a37..e10cbdd86a072 100644 --- 
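MvDedupe and the other mv functions above dispatch on the block element type, and MvMedian below (like MvAvg and MvSum) shows the one spot where that is not enough: unsigned_long values travel in LONG blocks, so the LONG arm re-checks the declared data type. A self-contained sketch of the pattern (hypothetical factory names):

    // Hypothetical factory names; illustrates why the LONG arm needs a
    // second check - unsigned_long shares the LONG element type.
    class MvDispatchSketch {
        enum ElementType { INT, LONG, DOUBLE }
        enum DataType { INTEGER, LONG, UNSIGNED_LONG, DOUBLE }

        interface Factory {}
        record MedianIntFactory() implements Factory {}
        record MedianLongFactory() implements Factory {}
        record MedianUnsignedLongFactory() implements Factory {}
        record MedianDoubleFactory() implements Factory {}

        static Factory evaluator(ElementType elementType, DataType dataType) {
            return switch (elementType) {
                case DOUBLE -> new MedianDoubleFactory();
                case INT -> new MedianIntFactory();
                // LONG blocks back both long and unsigned_long fields:
                case LONG -> dataType == DataType.UNSIGNED_LONG
                    ? new MedianUnsignedLongFactory()
                    : new MedianLongFactory();
            };
        }
    }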
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedian.java @@ -45,11 +45,11 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> dvrCtx -> new MvMedianDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case INT -> dvrCtx -> new MvMedianIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); + case DOUBLE -> new MvMedianDoubleEvaluator.Factory(fieldEval); + case INT -> new MvMedianIntEvaluator.Factory(fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? dvrCtx -> new MvMedianUnsignedLongEvaluator(fieldEval.get(dvrCtx), dvrCtx) - : dvrCtx -> new MvMedianLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); + ? new MvMedianUnsignedLongEvaluator.Factory(fieldEval) + : new MvMedianLongEvaluator.Factory(fieldEval); default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java index 2647cbfc2e0c3..18b452f9c7040 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMin.java @@ -12,6 +12,8 @@ import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Expression; @@ -26,7 +28,14 @@ * Reduce a multivalued field to a single valued field containing the minimum value. 
*/ public class MvMin extends AbstractMultivalueFunction { - public MvMin(Source source, Expression field) { + @FunctionInfo(returnType = "?", description = "Reduce a multivalued field to a single valued field containing the minimum value.") + public MvMin( + Source source, + @Param( + name = "v", + type = { "unsigned_long", "date", "boolean", "double", "ip", "text", "integer", "keyword", "version", "long" } + ) Expression field + ) { super(source, field); } @@ -38,12 +47,12 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case BOOLEAN -> dvrCtx -> new MvMinBooleanEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case BYTES_REF -> dvrCtx -> new MvMinBytesRefEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case DOUBLE -> dvrCtx -> new MvMinDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case INT -> dvrCtx -> new MvMinIntEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case LONG -> dvrCtx -> new MvMinLongEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; + case BOOLEAN -> new MvMinBooleanEvaluator.Factory(fieldEval); + case BYTES_REF -> new MvMinBytesRefEvaluator.Factory(fieldEval); + case DOUBLE -> new MvMinDoubleEvaluator.Factory(fieldEval); + case INT -> new MvMinIntEvaluator.Factory(fieldEval); + case LONG -> new MvMinLongEvaluator.Factory(fieldEval); + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index 858842cd78721..f543a8ec3878b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -40,11 +40,11 @@ protected TypeResolution resolveFieldType() { @Override protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fieldEval) { return switch (LocalExecutionPlanner.toElementType(field().dataType())) { - case DOUBLE -> dvrCtx -> new MvSumDoubleEvaluator(fieldEval.get(dvrCtx), dvrCtx); - case INT -> dvrCtx -> new MvSumIntEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx); + case DOUBLE -> new MvSumDoubleEvaluator.Factory(fieldEval); + case INT -> new MvSumIntEvaluator.Factory(source(), fieldEval); case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG - ? dvrCtx -> new MvSumUnsignedLongEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx) - : dvrCtx -> new MvSumLongEvaluator(source(), fieldEval.get(dvrCtx), dvrCtx); + ? 
new MvSumUnsignedLongEvaluator.Factory(source(), fieldEval) + : new MvSumLongEvaluator.Factory(source(), fieldEval); case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java index 93086317be45c..d0fe387d680db 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/Coalesce.java @@ -121,11 +121,22 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - return dvrCxt -> new CoalesceEvaluator( - dvrCxt, - LocalExecutionPlanner.toElementType(dataType()), - children().stream().map(toEvaluator).map(x -> x.get(dvrCxt)).toList() - ); + List childEvaluators = children().stream().map(toEvaluator).toList(); + return new ExpressionEvaluator.Factory() { + @Override + public ExpressionEvaluator get(DriverContext context) { + return new CoalesceEvaluator( + context, + LocalExecutionPlanner.toElementType(dataType()), + childEvaluators.stream().map(x -> x.get(context)).toList() + ); + } + + @Override + public String toString() { + return "CoalesceEvaluator[values=" + childEvaluators + ']'; + } + }; } private record CoalesceEvaluator(DriverContext driverContext, ElementType resultType, List evaluators) diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java index c987513d5919e..1e84bf60b0dde 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Concat.java @@ -11,7 +11,6 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; -import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.esql.EsqlClientException; @@ -79,16 +78,12 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var values = children().stream().map(toEvaluator).toList(); - return dvrCtx -> new ConcatEvaluator( - new BreakingBytesRefBuilder(dvrCtx.breaker(), "concat"), - values.stream().map(fac -> fac.get(dvrCtx)).toArray(EvalOperator.ExpressionEvaluator[]::new), - dvrCtx - ); + var values = children().stream().map(toEvaluator).toArray(ExpressionEvaluator.Factory[]::new); + return new ConcatEvaluator.Factory(context -> new BreakingBytesRefBuilder(context.breaker(), "concat"), values); } @Evaluator - static BytesRef process(@Fixed(includeInToString = false) BreakingBytesRefBuilder scratch, BytesRef[] values) { + static BytesRef process(@Fixed(includeInToString = false, build = true) BreakingBytesRefBuilder scratch, BytesRef[] values) { scratch.grow(checkedTotalLength(values)); scratch.clear(); for (int i = 0; i < values.length; i++) { diff --git 
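Coalesce above takes a slightly different route: its evaluator needs the computed element type and a whole list of children, so instead of a generated factory it builds an anonymous ExpressionEvaluator.Factory that overrides toString(). A self-contained sketch of that shape (stand-in types, simplified from the real code):

    import java.util.List;

    // Sketch of the Coalesce-style factory: an anonymous class rather than a
    // lambda, so it can override toString() while capturing the converted
    // child factories.
    class CoalesceFactorySketch {
        interface DriverContext {}
        interface Evaluator {}

        interface Factory {
            Evaluator get(DriverContext context);
        }

        record CoalesceEvaluator(List<Evaluator> children) implements Evaluator {}

        static Factory coalesce(List<Factory> childFactories) {
            return new Factory() {
                @Override
                public Evaluator get(DriverContext context) {
                    // bind every child to the same driver context
                    return new CoalesceEvaluator(
                        childFactories.stream().map(f -> f.get(context)).toList()
                    );
                }

                @Override
                public String toString() {
                    return "CoalesceEvaluator[values=" + childFactories + "]";
                }
            };
        }
    }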
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java index 367d5323f28b8..1140bfcf1f5d9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/EndsWith.java @@ -98,8 +98,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var strEval = toEvaluator.apply(str); - var suffixEval = toEvaluator.apply(suffix); - return dvrCtx -> new EndsWithEvaluator(strEval.get(dvrCtx), suffixEval.get(dvrCtx), dvrCtx); + return new EndsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(suffix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java index ffb5a3543f3f2..952c3314af80a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/LTrim.java @@ -47,8 +47,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var field = toEvaluator.apply(field()); - return dvrCtx -> new LTrimEvaluator(field.get(dvrCtx), dvrCtx); + return new LTrimEvaluator.Factory(toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java index 73024bd74e624..14cb03943f520 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Left.java @@ -55,8 +55,8 @@ public Left( @Evaluator static BytesRef process( - @Fixed(includeInToString = false) BytesRef out, - @Fixed(includeInToString = false) UnicodeUtil.UTF8CodePoint cp, + @Fixed(includeInToString = false, build = true) BytesRef out, + @Fixed(includeInToString = false, build = true) UnicodeUtil.UTF8CodePoint cp, BytesRef str, int length ) { @@ -73,13 +73,12 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var strSupplier = toEvaluator.apply(str); - var lengthSupplier = toEvaluator.apply(length); - return dvrCtx -> { - BytesRef out = new BytesRef(); - UnicodeUtil.UTF8CodePoint cp = new UnicodeUtil.UTF8CodePoint(); - return new LeftEvaluator(out, cp, strSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); - }; + return new LeftEvaluator.Factory( + context -> new BytesRef(), + context -> new UnicodeUtil.UTF8CodePoint(), + toEvaluator.apply(str), + toEvaluator.apply(length) + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java index e0a1a8ed297a6..47ee8f20e7f32 100644 --- 
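Concat above, and Left here (with Right and Split following), stop allocating scratch objects inside the closure and instead hand the generated factory a context -> new ... builder, paired with @Fixed(..., build = true) on the process method. Each driver context then gets its own scratch, keeping evaluators thread-confined. A self-contained sketch of the idea, using simplified stand-in types (a StringBuilder in place of BytesRef and BreakingBytesRefBuilder):

    import java.util.function.Function;

    // Sketch of the @Fixed(build = true) pattern: the factory stores a builder
    // function instead of a shared scratch object, so every driver gets its own.
    class ScratchSketch {
        interface DriverContext {}

        record LeftEvaluator(StringBuilder scratch) {
            String eval(String str, int length) {
                scratch.setLength(0); // per-row init step on the reused scratch
                scratch.append(str, 0, Math.min(length, str.length()));
                return scratch.toString();
            }
        }

        record LeftFactory(Function<DriverContext, StringBuilder> scratchBuilder) {
            LeftEvaluator get(DriverContext context) {
                // fresh scratch per driver, mirroring context -> new BytesRef()
                return new LeftEvaluator(scratchBuilder.apply(context));
            }
        }

        static LeftFactory make() {
            return new LeftFactory(context -> new StringBuilder());
        }
    }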
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Length.java @@ -72,7 +72,6 @@ protected NodeInfo info() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var field = toEvaluator.apply(field()); - return dvrCtx -> new LengthEvaluator(field.get(dvrCtx), dvrCtx); + return new LengthEvaluator.Factory(toEvaluator.apply(field())); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java index 46c8d43f0a5a7..273a032a90ed3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RTrim.java @@ -47,8 +47,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var field = toEvaluator.apply(field()); - return dvrCtx -> new RTrimEvaluator(field.get(dvrCtx), dvrCtx); + return new RTrimEvaluator.Factory(toEvaluator.apply(field())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java index 99d44b534ac26..0ed4bd0fe7d02 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Replace.java @@ -128,10 +128,10 @@ public ExpressionEvaluator.Factory toEvaluator(Function new ReplaceConstantEvaluator(source(), strEval.get(drvCtx), regexPattern, newStrEval.get(drvCtx), drvCtx); + return new ReplaceConstantEvaluator.Factory(source(), strEval, regexPattern, newStrEval); } var regexEval = toEvaluator.apply(regex); - return (drvCtx) -> new ReplaceEvaluator(source(), strEval.get(drvCtx), regexEval.get(drvCtx), newStrEval.get(drvCtx), drvCtx); + return new ReplaceEvaluator.Factory(source(), strEval, regexEval, newStrEval); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java index 3b79b6683f16f..f77c703e7cb0c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Right.java @@ -55,8 +55,8 @@ public Right( @Evaluator static BytesRef process( - @Fixed(includeInToString = false) BytesRef out, - @Fixed(includeInToString = false) UnicodeUtil.UTF8CodePoint cp, + @Fixed(includeInToString = false, build = true) BytesRef out, + @Fixed(includeInToString = false, build = true) UnicodeUtil.UTF8CodePoint cp, BytesRef str, int length ) { @@ -77,13 +77,12 @@ static BytesRef process( @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var strSupplier = toEvaluator.apply(str); - var lengthSupplier = toEvaluator.apply(length); - return dvrCtx -> { - BytesRef out = new BytesRef(); - UnicodeUtil.UTF8CodePoint cp = new 
UnicodeUtil.UTF8CodePoint(); - return new RightEvaluator(out, cp, strSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); - }; + return new RightEvaluator.Factory( + context -> new BytesRef(), + context -> new UnicodeUtil.UTF8CodePoint(), + toEvaluator.apply(str), + toEvaluator.apply(length) + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java index 75d2ada1ca97b..7f18be0e7b18e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Split.java @@ -70,7 +70,7 @@ static void process( BytesRefBlock.Builder builder, BytesRef str, @Fixed byte delim, - @Fixed(includeInToString = false) BytesRef scratch + @Fixed(includeInToString = false, build = true) BytesRef scratch ) { scratch.bytes = str.bytes; scratch.offset = str.offset; @@ -96,7 +96,12 @@ static void process( } @Evaluator(extraName = "Variable") - static void process(BytesRefBlock.Builder builder, BytesRef str, BytesRef delim, @Fixed(includeInToString = false) BytesRef scratch) { + static void process( + BytesRefBlock.Builder builder, + BytesRef str, + BytesRef delim, + @Fixed(includeInToString = false, build = true) BytesRef scratch + ) { if (delim.length != 1) { throw new QlIllegalArgumentException("delimiter must be single byte for now"); } @@ -117,13 +122,12 @@ protected NodeInfo info() { public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var str = toEvaluator.apply(left()); if (right().foldable() == false) { - var delim = toEvaluator.apply(right()); - return dvrCtx -> new SplitVariableEvaluator(str.get(dvrCtx), delim.get(dvrCtx), new BytesRef(), dvrCtx); + return new SplitVariableEvaluator.Factory(str, toEvaluator.apply(right()), context -> new BytesRef()); } BytesRef delim = (BytesRef) right().fold(); if (delim.length != 1) { throw new QlIllegalArgumentException("for now delimiter must be a single byte"); } - return dvrCtx -> new SplitSingleByteEvaluator(str.get(dvrCtx), delim.bytes[delim.offset], new BytesRef(), dvrCtx); + return new SplitSingleByteEvaluator.Factory(str, delim.bytes[delim.offset], context -> new BytesRef()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java index 8d147f3cf9caf..3497d9360b187 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/StartsWith.java @@ -91,8 +91,6 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var strEval = toEvaluator.apply(str); - var prefixEval = toEvaluator.apply(prefix); - return dvrCtx -> new StartsWithEvaluator(strEval.get(dvrCtx), prefixEval.get(dvrCtx), dvrCtx); + return new StartsWithEvaluator.Factory(toEvaluator.apply(str), toEvaluator.apply(prefix)); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java index 9b8a6c6aa1720..261b7aeb19da2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Substring.java @@ -132,12 +132,12 @@ public ScriptTemplate asScript() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { - var strSupplier = toEvaluator.apply(str); - var startSupplier = toEvaluator.apply(start); + var strFactory = toEvaluator.apply(str); + var startFactory = toEvaluator.apply(start); if (length == null) { - return dvrCtx -> new SubstringNoLengthEvaluator(strSupplier.get(dvrCtx), startSupplier.get(dvrCtx), dvrCtx); + return new SubstringNoLengthEvaluator.Factory(strFactory, startFactory); } - var lengthSupplier = toEvaluator.apply(length); - return dvrCtx -> new SubstringEvaluator(strSupplier.get(dvrCtx), startSupplier.get(dvrCtx), lengthSupplier.get(dvrCtx), dvrCtx); + var lengthFactory = toEvaluator.apply(length); + return new SubstringEvaluator.Factory(strFactory, startFactory, lengthFactory); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java index f9d5febd5fc02..b865199c1c2ae 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/Trim.java @@ -49,7 +49,7 @@ public Object fold() { @Override public ExpressionEvaluator.Factory toEvaluator(Function toEvaluator) { var field = toEvaluator.apply(field()); - return dvrCtx -> new TrimEvaluator(field.get(dvrCtx), dvrCtx); + return new TrimEvaluator.Factory(field); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java index f59211ab42882..1e1da2634fadf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Add.java @@ -32,11 +32,11 @@ public Add(Source source, Expression left, Expression right) { left, right, ADD, - AddIntsEvaluator::new, - AddLongsEvaluator::new, - AddUnsignedLongsEvaluator::new, - (s, l, r, dvrCtx) -> new AddDoublesEvaluator(l, r, dvrCtx), - AddDatetimesEvaluator::new + AddIntsEvaluator.Factory::new, + AddLongsEvaluator.Factory::new, + AddUnsignedLongsEvaluator.Factory::new, + (s, lhs, rhs) -> new AddDoublesEvaluator.Factory(lhs, rhs), + AddDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java index af827522b136e..0132301cb79b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/DateTimeArithmeticOperation.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.ExceptionUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; @@ -31,12 +30,7 @@ abstract class DateTimeArithmeticOperation extends EsqlArithmeticOperation { /** Arithmetic (quad) function. */ interface DatetimeArithmeticEvaluator { - ExpressionEvaluator apply( - Source source, - ExpressionEvaluator expressionEvaluator, - TemporalAmount temporalAmount, - DriverContext driverContext - ); + ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory expressionEvaluator, TemporalAmount temporalAmount); } private final DatetimeArithmeticEvaluator datetimes; @@ -149,12 +143,7 @@ public ExpressionEvaluator.Factory toEvaluator(Function datetimes.apply( - source(), - toEvaluator.apply(datetimeArgument).get(dvrCtx), - (TemporalAmount) temporalAmountArgument.fold(), - dvrCtx - ); + return datetimes.apply(source(), toEvaluator.apply(datetimeArgument), (TemporalAmount) temporalAmountArgument.fold()); } else { return super.toEvaluator(toEvaluator); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java index 5a89e24eb6007..0bcbe21c60a63 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Div.java @@ -31,10 +31,10 @@ public Div(Source source, Expression left, Expression right, DataType type) { left, right, DIV, - DivIntsEvaluator::new, - DivLongsEvaluator::new, - DivUnsignedLongsEvaluator::new, - (s, l, r, dvrCtx) -> new DivDoublesEvaluator(l, r, dvrCtx) + DivIntsEvaluator.Factory::new, + DivLongsEvaluator.Factory::new, + DivUnsignedLongsEvaluator.Factory::new, + (s, lhs, rhs) -> new DivDoublesEvaluator.Factory(lhs, rhs) ); this.type = type; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java index d09ae25d91746..dc5be3373198b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/EsqlArithmeticOperation.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -71,12 +70,7 @@ public String symbol() { /** Arithmetic (quad) function. 
*/ interface ArithmeticEvaluator { - ExpressionEvaluator apply( - Source source, - ExpressionEvaluator expressionEvaluator1, - ExpressionEvaluator expressionEvaluator2, - DriverContext driverContext - ); + ExpressionEvaluator.Factory apply(Source source, ExpressionEvaluator.Factory lhs, ExpressionEvaluator.Factory rhs); } private final ArithmeticEvaluator ints; @@ -121,8 +115,8 @@ public ExpressionEvaluator.Factory toEvaluator(Function eval.apply(source(), l.get(dvrCtx), r.get(dvrCtx), dvrCtx); + return eval.apply(source(), lhs, rhs); } throw new EsqlIllegalArgumentException("Unsupported type " + leftType); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java index f1aaeb1adaf14..85510a2f63c33 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mod.java @@ -23,10 +23,10 @@ public Mod(Source source, Expression left, Expression right) { left, right, MOD, - ModIntsEvaluator::new, - ModLongsEvaluator::new, - ModUnsignedLongsEvaluator::new, - (s, l, r, dvrCtx) -> new ModDoublesEvaluator(l, r, dvrCtx) + ModIntsEvaluator.Factory::new, + ModLongsEvaluator.Factory::new, + ModUnsignedLongsEvaluator.Factory::new, + (s, lhs, rhs) -> new ModDoublesEvaluator.Factory(lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java index 9b42cfce182b9..963f09486a361 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Mul.java @@ -27,10 +27,10 @@ public Mul(Source source, Expression left, Expression right) { left, right, MUL, - MulIntsEvaluator::new, - MulLongsEvaluator::new, - MulUnsignedLongsEvaluator::new, - (s, l, r, dvrCtx) -> new MulDoublesEvaluator(l, r, dvrCtx) + MulIntsEvaluator.Factory::new, + MulLongsEvaluator.Factory::new, + MulUnsignedLongsEvaluator.Factory::new, + (s, lhs, rhs) -> new MulDoublesEvaluator.Factory(lhs, rhs) ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java index 97a0323829d59..2ad5c5b9de5b5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Neg.java @@ -46,20 +46,20 @@ public ExpressionEvaluator.Factory toEvaluator(Function new NegIntsEvaluator(source(), f.get(dvrCtx), dvrCtx); + factory = new NegIntsEvaluator.Factory(source(), f); } // Unsigned longs are unsupported by choice; negating them would require implicitly converting to long. 
else if (type == DataTypes.LONG) { - supplier = dvrCtx -> new NegLongsEvaluator(source(), f.get(dvrCtx), dvrCtx); + factory = new NegLongsEvaluator.Factory(source(), f); } else if (type == DataTypes.DOUBLE) { - supplier = dvrCtx -> new NegDoublesEvaluator(f.get(dvrCtx), dvrCtx); + factory = new NegDoublesEvaluator.Factory(f); } - if (supplier != null) { - return supplier; + if (factory != null) { + return factory; } } else if (isTemporalAmount(type)) { return toEvaluator.apply(field()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java index ba071c05a15a8..d7999c87c4398 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/Sub.java @@ -35,11 +35,11 @@ public Sub(Source source, Expression left, Expression right) { left, right, SUB, - SubIntsEvaluator::new, - SubLongsEvaluator::new, - SubUnsignedLongsEvaluator::new, - (s, l, r, dvrCtx) -> new SubDoublesEvaluator(l, r, dvrCtx), - SubDatetimesEvaluator::new + SubIntsEvaluator.Factory::new, + SubLongsEvaluator.Factory::new, + SubUnsignedLongsEvaluator.Factory::new, + (s, lhs, rhs) -> new SubDoublesEvaluator.Factory(lhs, rhs), + SubDatetimesEvaluator.Factory::new ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java index eb10d50c72cdb..20ec1ac410f64 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypes.java @@ -112,7 +112,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -255,9 +257,11 @@ public static List namedTypeEntries() { of(LogicalPlan.class, EsRelation.class, PlanNamedTypes::writeEsRelation, PlanNamedTypes::readEsRelation), of(LogicalPlan.class, Eval.class, PlanNamedTypes::writeEval, PlanNamedTypes::readEval), of(LogicalPlan.class, Enrich.class, PlanNamedTypes::writeEnrich, PlanNamedTypes::readEnrich), + of(LogicalPlan.class, EsqlProject.class, PlanNamedTypes::writeEsqlProject, PlanNamedTypes::readEsqlProject), of(LogicalPlan.class, Filter.class, PlanNamedTypes::writeFilter, PlanNamedTypes::readFilter), of(LogicalPlan.class, Grok.class, PlanNamedTypes::writeGrok, PlanNamedTypes::readGrok), of(LogicalPlan.class, Limit.class, PlanNamedTypes::writeLimit, PlanNamedTypes::readLimit), + of(LogicalPlan.class, MvExpand.class, PlanNamedTypes::writeMvExpand, PlanNamedTypes::readMvExpand), of(LogicalPlan.class, OrderBy.class, PlanNamedTypes::writeOrderBy, PlanNamedTypes::readOrderBy), of(LogicalPlan.class, Project.class, PlanNamedTypes::writeProject, PlanNamedTypes::readProject), of(LogicalPlan.class, 
TopN.class, PlanNamedTypes::writeTopN, PlanNamedTypes::readTopN), @@ -581,13 +585,14 @@ static void writeLimitExec(PlanStreamOutput out, LimitExec limitExec) throws IOE } static MvExpandExec readMvExpandExec(PlanStreamInput in) throws IOException { - return new MvExpandExec(in.readSource(), in.readPhysicalPlanNode(), in.readNamedExpression()); + return new MvExpandExec(in.readSource(), in.readPhysicalPlanNode(), in.readNamedExpression(), in.readAttribute()); } static void writeMvExpandExec(PlanStreamOutput out, MvExpandExec mvExpandExec) throws IOException { out.writeNoSource(); out.writePhysicalPlanNode(mvExpandExec.child()); out.writeNamedExpression(mvExpandExec.target()); + out.writeAttribute(mvExpandExec.expanded()); } static OrderExec readOrderExec(PlanStreamInput in) throws IOException { @@ -683,7 +688,7 @@ static void writeDissect(PlanStreamOutput out, Dissect dissect) throws IOExcepti } static EsRelation readEsRelation(PlanStreamInput in) throws IOException { - return new EsRelation(in.readSource(), readEsIndex(in), readAttributes(in)); + return new EsRelation(in.readSource(), readEsIndex(in), readAttributes(in), in.readBoolean()); } static void writeEsRelation(PlanStreamOutput out, EsRelation relation) throws IOException { @@ -691,6 +696,7 @@ static void writeEsRelation(PlanStreamOutput out, EsRelation relation) throws IO out.writeNoSource(); writeEsIndex(out, relation.index()); writeAttributes(out, relation.output()); + out.writeBoolean(relation.frozen()); } static Eval readEval(PlanStreamInput in) throws IOException { @@ -725,6 +731,16 @@ static void writeEnrich(PlanStreamOutput out, Enrich enrich) throws IOException writeNamedExpressions(out, enrich.enrichFields()); } + static EsqlProject readEsqlProject(PlanStreamInput in) throws IOException { + return new EsqlProject(in.readSource(), in.readLogicalPlanNode(), readNamedExpressions(in)); + } + + static void writeEsqlProject(PlanStreamOutput out, EsqlProject project) throws IOException { + out.writeNoSource(); + out.writeLogicalPlanNode(project.child()); + writeNamedExpressions(out, project.projections()); + } + static Filter readFilter(PlanStreamInput in) throws IOException { return new Filter(in.readSource(), in.readLogicalPlanNode(), in.readExpression()); } @@ -764,6 +780,17 @@ static void writeLimit(PlanStreamOutput out, Limit limit) throws IOException { out.writeLogicalPlanNode(limit.child()); } + static MvExpand readMvExpand(PlanStreamInput in) throws IOException { + return new MvExpand(in.readSource(), in.readLogicalPlanNode(), in.readNamedExpression(), in.readAttribute()); + } + + static void writeMvExpand(PlanStreamOutput out, MvExpand mvExpand) throws IOException { + out.writeNoSource(); + out.writeLogicalPlanNode(mvExpand.child()); + out.writeNamedExpression(mvExpand.target()); + out.writeAttribute(mvExpand.expanded()); + } + static OrderBy readOrderBy(PlanStreamInput in) throws IOException { return new OrderBy( in.readSource(), diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java index 36d275909b47a..f0bc5697d4002 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalOptimizerContext.java @@ -10,4 +10,36 @@ import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import 
org.elasticsearch.xpack.esql.stats.SearchStats; -public record LocalLogicalOptimizerContext(EsqlConfiguration configuration, SearchStats searchStats) {} +import java.util.Objects; + +public final class LocalLogicalOptimizerContext extends LogicalOptimizerContext { + private final SearchStats searchStats; + + public LocalLogicalOptimizerContext(EsqlConfiguration configuration, SearchStats searchStats) { + super(configuration); + this.searchStats = searchStats; + } + + public SearchStats searchStats() { + return searchStats; + } + + @Override + public boolean equals(Object obj) { + if (super.equals(obj)) { + var that = (LocalLogicalOptimizerContext) obj; + return Objects.equals(this.searchStats, that.searchStats); + } + return false; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), searchStats); + } + + @Override + public String toString() { + return "LocalLogicalOptimizerContext[" + "configuration=" + configuration() + ", " + "searchStats=" + searchStats + ']'; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 9a49849b34e98..3451a3981d3e3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -116,10 +116,7 @@ else if (plan instanceof Project project) { } } - public abstract static class ParameterizedOptimizerRule extends ParameterizedRule< - SubPlan, - LogicalPlan, - P> { + abstract static class ParameterizedOptimizerRule extends ParameterizedRule { public final LogicalPlan apply(LogicalPlan plan, P context) { return plan.transformUp(typeToken(), t -> rule(t, context)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java new file mode 100644 index 0000000000000..bfd3a5569b0e9 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalOptimizerContext.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer; + +import org.elasticsearch.xpack.esql.session.EsqlConfiguration; + +import java.util.Objects; + +public class LogicalOptimizerContext { + private final EsqlConfiguration configuration; + + public LogicalOptimizerContext(EsqlConfiguration configuration) { + this.configuration = configuration; + } + + public EsqlConfiguration configuration() { + return configuration; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) return true; + if (obj == null || obj.getClass() != this.getClass()) return false; + var that = (LogicalOptimizerContext) obj; + return Objects.equals(this.configuration, that.configuration); + } + + @Override + public int hashCode() { + return Objects.hash(configuration); + } + + @Override + public String toString() { + return "LogicalOptimizerContext[" + "configuration=" + configuration + ']'; + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 23fa051c1d7a2..e7409543ca68e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.optimizer; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -18,11 +19,13 @@ import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.RegexExtract; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.LocalExecutionPlanner; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; @@ -49,16 +52,22 @@ import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.SetAsOptimized; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules.SimplifyComparisonsArithmetics; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; +import org.elasticsearch.xpack.ql.plan.logical.EsRelation; import org.elasticsearch.xpack.ql.plan.logical.Filter; import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.plan.logical.OrderBy; import org.elasticsearch.xpack.ql.plan.logical.Project; import org.elasticsearch.xpack.ql.plan.logical.UnaryPlan; +import org.elasticsearch.xpack.ql.rule.ParameterizedRule; +import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.ql.rule.Rule; -import org.elasticsearch.xpack.ql.rule.RuleExecutor; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import 
org.elasticsearch.xpack.ql.util.CollectionUtils; import org.elasticsearch.xpack.ql.util.Holder; +import org.elasticsearch.xpack.ql.util.StringUtils; import java.time.ZoneId; import java.util.ArrayList; @@ -77,7 +86,11 @@ import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.PropagateNullable; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection; -public class LogicalPlanOptimizer extends RuleExecutor { +public class LogicalPlanOptimizer extends ParameterizedRuleExecutor { + + public LogicalPlanOptimizer(LogicalOptimizerContext optimizerContext) { + super(optimizerContext); + } public LogicalPlan optimize(LogicalPlan verified) { return verified.optimized() ? verified : execute(verified); @@ -95,13 +108,14 @@ protected static List> rules() { new SubstituteSurrogates(), new ReplaceRegexMatch(), new ReplaceAliasingEvalWithProject() - // new ReplaceTextFieldAttributesWithTheKeywordSubfield() + // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 ); var operators = new Batch<>( "Operator Optimization", new CombineProjections(), new CombineEvals(), + new ReplaceDuplicateAggWithEval(), new PruneEmptyPlans(), new PropagateEmptyRelation(), new ConvertStringToByteRef(), @@ -124,6 +138,7 @@ protected static List> rules() { new PruneColumns(), new PruneLiteralsInOrderBy(), new PushDownAndCombineLimits(), + new DuplicateLimitAfterMvExpand(), new PushDownAndCombineFilters(), new PushDownEval(), new PushDownRegexExtract(), @@ -135,9 +150,10 @@ protected static List> rules() { var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); var cleanup = new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN()); + var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions, operators, skip, cleanup, label); + return asList(substitutions, operators, skip, cleanup, defaultTopN, label); } // TODO: currently this rule only works for aggregate functions (AVG) @@ -409,6 +425,10 @@ private static Limit descendantLimit(UnaryPlan unary) { while (plan instanceof Aggregate == false) { if (plan instanceof Limit limit) { return limit; + } else if (plan instanceof MvExpand) { + // the limit that applies to mv_expand shouldn't be changed + // i.e. "| limit 1 | mv_expand x | limit 20" where we want that last "limit" to apply to the expand results + return null; + } if (plan.child() instanceof UnaryPlan unaryPlan) { plan = unaryPlan; @@ -420,6 +440,92 @@ private static Limit descendantLimit(UnaryPlan unary) { } } + static class DuplicateLimitAfterMvExpand extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(Limit limit) { + var child = limit.child(); + var shouldSkip = child instanceof Eval + || child instanceof Project + || child instanceof RegexExtract + || child instanceof Enrich + || child instanceof Limit; + + if (shouldSkip == false && child instanceof UnaryPlan unary) { + MvExpand mvExpand = descendantMvExpand(unary); + if (mvExpand != null) { + Limit limitBeforeMvExpand = limitBeforeMvExpand(mvExpand); + // if there is no "appropriate" limit before mv_expand, then push down a copy of the one after it so that: + // - a possible TopN is properly built as low as possible in the tree (close to Lucene) + // - the input of mv_expand is as small as possible before it is expanded (fewer rows to inflate and occupy memory) + if (limitBeforeMvExpand == null) { + var duplicateLimit = new
Limit(limit.source(), limit.limit(), mvExpand.child()); + return limit.replaceChild(propagateDuplicateLimitUntilMvExpand(duplicateLimit, mvExpand, unary)); + } + } + return limit; + } + + private static MvExpand descendantMvExpand(UnaryPlan unary) { + UnaryPlan plan = unary; + AttributeSet filterReferences = new AttributeSet(); + while (plan instanceof Aggregate == false) { + if (plan instanceof MvExpand mve) { + // don't return the mv_expand that has a filter after it that uses the expanded values + // since this will trigger the use of a potentially incorrect (too restrictive) limit further down in the tree + if (filterReferences.isEmpty() == false) { + if (filterReferences.contains(mve.target()) // the same field or reference attribute is used in mv_expand AND filter + || mve.target() instanceof ReferenceAttribute // or the mv_expand attr hasn't yet been resolved to a field attr + // or not all filter references have been resolved to field attributes + || filterReferences.stream().anyMatch(ref -> ref instanceof ReferenceAttribute)) { + return null; + } + } + return mve; + } else if (plan instanceof Filter filter) { + // gather all the filters' references to be checked later when a mv_expand is found + filterReferences.addAll(filter.references()); + } else if (plan instanceof OrderBy) { + // ordering after mv_expand COULD break the order of the results, so the limit shouldn't be copied past mv_expand + // something like from test | sort emp_no | mv_expand job_positions | sort first_name | limit 5 + // (the sort first_name likely changes the order of the docs after sort emp_no, so "limit 5" shouldn't be copied down) + return null; + } + + if (plan.child() instanceof UnaryPlan unaryPlan) { + plan = unaryPlan; + } else { + break; + } + } + return null; + } + + private static Limit limitBeforeMvExpand(MvExpand mvExpand) { + UnaryPlan plan = mvExpand; + while (plan instanceof Aggregate == false) { + if (plan instanceof Limit limit) { + return limit; + } + if (plan.child() instanceof UnaryPlan unaryPlan) { + plan = unaryPlan; + } else { + break; + } + } + return null; + } + + private LogicalPlan propagateDuplicateLimitUntilMvExpand(Limit duplicateLimit, MvExpand mvExpand, UnaryPlan child) { + if (child == mvExpand) { + return mvExpand.replaceChild(duplicateLimit); + } else { + return child.replaceChild(propagateDuplicateLimitUntilMvExpand(duplicateLimit, mvExpand, (UnaryPlan) child.child())); + } + } + }
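To make the rule's effect concrete, here is a minimal standalone sketch of the same limit-duplication idea. The plan nodes below are hypothetical stand-ins, not the real ESQL classes, and the real rule additionally walks through intermediate unary nodes and the filter/sort restrictions shown above:

```java
// Minimal standalone sketch of the limit-duplication idea behind DuplicateLimitAfterMvExpand.
// Source, MvExpand and Limit are hypothetical stand-ins for the real plan nodes.
public class LimitDuplicationSketch {
    interface Plan {}
    record Source(String index) implements Plan {}
    record MvExpand(Plan child, String field) implements Plan {}
    record Limit(Plan child, int n) implements Plan {}

    // A Limit above an MvExpand, with no Limit already below the expand, is
    // copied beneath it so fewer rows are inflated by the expansion.
    static Plan rule(Plan plan) {
        if (plan instanceof Limit limit
            && limit.child() instanceof MvExpand expand
            && hasLimitBelow(expand.child()) == false) {
            return new Limit(new MvExpand(new Limit(expand.child(), limit.n()), expand.field()), limit.n());
        }
        return plan;
    }

    static boolean hasLimitBelow(Plan plan) {
        if (plan instanceof Limit) {
            return true;
        }
        if (plan instanceof MvExpand expand) {
            return hasLimitBelow(expand.child());
        }
        return false; // Source (or anything else) ends the walk
    }

    public static void main(String[] args) {
        // "from test | mv_expand x | limit 20" gains a copy of the limit below the expand:
        System.out.println(rule(new Limit(new MvExpand(new Source("test"), "x"), 20)));
        // prints: Limit[child=MvExpand[child=Limit[child=Source[index=test], n=20], field=x], n=20]
    }
}
```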
+ + // 3 in (field, 4, 5) --> 3 in (field) or 3 in (4, 5) public static class SplitInWithFoldableValue extends OptimizerRules.OptimizerExpressionRule { @@ -494,7 +600,8 @@ protected LogicalPlan rule(UnaryPlan plan) { if (plan.child() instanceof LocalRelation local && local.supplier() == LocalSupplier.EMPTY) { // only care about non-grouped aggs might return something (count) if (plan instanceof Aggregate agg && agg.groupings().isEmpty()) { - p = skipPlan(plan, aggsFromEmpty(agg.aggregates())); + List emptyBlocks = aggsFromEmpty(agg.aggregates()); + p = skipPlan(plan, LocalSupplier.of(emptyBlocks.toArray(Block[]::new))); } else { p = skipPlan(plan); } @@ -502,25 +609,34 @@ protected LogicalPlan rule(UnaryPlan plan) { return p; } - private static LocalSupplier aggsFromEmpty(List aggs) { - Block[] blocks = new Block[aggs.size()]; + private static List aggsFromEmpty(List aggs) { + // TODO: Should we introduce a skip operator that just never queries the source + List blocks = new ArrayList<>(); var blockFactory = BlockFactory.getNonBreakingInstance(); int i = 0; for (var agg : aggs) { // there needs to be an alias if (agg instanceof Alias a && a.child() instanceof AggregateFunction aggFunc) { - // look for count(literal) with literal != null - Object value = aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L : null; - var wrapper = BlockUtils.wrapperFor(blockFactory, LocalExecutionPlanner.toElementType(aggFunc.dataType()), 1); - wrapper.accept(value); - blocks[i++] = wrapper.builder().build(); - BlockUtils.constantBlock(blockFactory, value, 1); + List output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of()); + for (Attribute o : output) { + DataType dataType = o.dataType(); + // fill the boolean block later in LocalExecutionPlanner + if (dataType != DataTypes.BOOLEAN) { + // look for count(literal) with literal != null + var wrapper = BlockUtils.wrapperFor(blockFactory, LocalExecutionPlanner.toElementType(dataType), 1); + if (aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null)) { + wrapper.accept(0L); + } else { + wrapper.accept(null); + } + blocks.add(wrapper.builder().build()); + } + } } else { throw new EsqlIllegalArgumentException("Did not expect a non-aliased aggregation {}", agg); } } - - return LocalSupplier.of(blocks); + return blocks; } } @@ -664,7 +780,6 @@ protected LogicalPlan rule(Enrich re) { } protected static class PushDownAndCombineOrderBy extends OptimizerRules.OptimizerRule { - @Override protected LogicalPlan rule(OrderBy orderBy) { LogicalPlan child = orderBy.child(); @@ -860,6 +975,40 @@ protected LogicalPlan rule(Limit plan) { } } + /** + * This rule adds an explicit TopN node to a plan that only has an OrderBy right before Lucene. + * To date, the only known use case that "needs" this is a query of the form + * from test + * | sort emp_no + * | mv_expand first_name + * | rename first_name AS x + * | where x LIKE "*a*" + * | limit 15 + * + * or + * + * from test + * | sort emp_no + * | mv_expand first_name + * | sort first_name + * | limit 15 + * + * The PushDownAndCombineLimits rule will copy the "limit 15" after "sort emp_no" if there is no filter on the expanded values + * OR if there is no sort between "limit" and "mv_expand". + * But, since these queries contain such a filter or sort, the "sort emp_no" will have no limit when it reaches the current rule. + */ + static class AddDefaultTopN extends ParameterizedOptimizerRule { + + @Override + protected LogicalPlan rule(LogicalPlan plan, LogicalOptimizerContext context) { + if (plan instanceof UnaryPlan unary && unary.child() instanceof OrderBy order && order.child() instanceof EsRelation relation) { + var limit = new Literal(Source.EMPTY, context.configuration().resultTruncationMaxSize(), DataTypes.INTEGER); + return unary.replaceChild(new TopN(plan.source(), relation, order.order(), limit)); + } + return plan; + } + }
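The shape change made by AddDefaultTopN can be sketched in the same standalone style. The nodes below are hypothetical stand-ins: the match on MvExpand stands in for the real rule's broader match on any UnaryPlan, and RESULT_TRUNCATION_MAX_SIZE stands in for the configured resultTruncationMaxSize:

```java
// Standalone sketch of AddDefaultTopN, with hypothetical stand-in plan nodes.
import java.util.List;

public class DefaultTopNSketch {
    interface Plan {}
    record Relation(String index) implements Plan {}
    record OrderBy(Plan child, List<String> order) implements Plan {}
    record TopN(Plan child, List<String> order, int limit) implements Plan {}
    record MvExpand(Plan child, String field) implements Plan {} // stands in for "any UnaryPlan"

    static final int RESULT_TRUNCATION_MAX_SIZE = 10_000; // stand-in for the configured default

    // An OrderBy sitting directly on the relation, under some unary node, becomes
    // a TopN bounded by the default truncation size, so Lucene can sort-and-cut.
    static Plan addDefaultTopN(Plan plan) {
        if (plan instanceof MvExpand unary
            && unary.child() instanceof OrderBy order
            && order.child() instanceof Relation relation) {
            return new MvExpand(new TopN(relation, order.order(), RESULT_TRUNCATION_MAX_SIZE), unary.field());
        }
        return plan;
    }

    public static void main(String[] args) {
        // "from test | sort emp_no | mv_expand first_name | ..." with no pushed-down limit:
        Plan plan = new MvExpand(new OrderBy(new Relation("test"), List.of("emp_no")), "first_name");
        System.out.println(addDefaultTopN(plan));
        // prints: MvExpand[child=TopN[child=Relation[index=test], order=[emp_no], limit=10000], field=first_name]
    }
}
```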
+ + public static class ReplaceRegexMatch extends OptimizerRules.ReplaceRegexMatch { protected Expression regexToEquals(RegexMatch regexMatch, Literal literal) { @@ -945,6 +1094,140 @@ private LogicalPlan rule(Eval eval) { return plan; } + } + private abstract static class ParameterizedOptimizerRule extends ParameterizedRule< + SubPlan, + LogicalPlan, + P> { + + public final LogicalPlan apply(LogicalPlan plan, P context) { + return plan.transformDown(typeToken(), t -> rule(t, context)); + } + + protected abstract LogicalPlan rule(SubPlan plan, P context); + } + + /** + * Normalize aggregation functions by: + * 1. replacing references to field attributes with their source + * 2. in the case of Count, aligning the various forms (Count(1), Count(0), Count(), Count(*)) to Count(*) + */ + // TODO waiting on https://github.com/elastic/elasticsearch/issues/100634 + static class NormalizeAggregate extends Rule { + + @Override + public LogicalPlan apply(LogicalPlan plan) { + AttributeMap aliases = new AttributeMap<>(); + + // traverse the tree bottom-up + // 1. if it's an Aggregate, normalize the aggregates + // regardless, collect the attributes but only if they refer to an attribute or literal + plan = plan.transformUp(p -> { + if (p instanceof Aggregate agg) { + p = normalize(agg, aliases); + } + p.forEachExpression(Alias.class, a -> { + var child = a.child(); + if (child.foldable() || child instanceof NamedExpression) { + aliases.putIfAbsent(a.toAttribute(), child); + } + }); + + return p; + }); + return plan; + } + + private static LogicalPlan normalize(Aggregate aggregate, AttributeMap aliases) { + var aggs = aggregate.aggregates(); + List newAggs = new ArrayList<>(aggs.size()); + boolean changed = false; + + for (NamedExpression agg : aggs) { + if (agg instanceof Alias as && as.child() instanceof AggregateFunction af) { + // replace field reference + if (af.field() instanceof NamedExpression ne) { + Attribute attr = ne.toAttribute(); + var resolved = aliases.resolve(attr, attr); + if (resolved != attr) { + changed = true; + var newChildren = CollectionUtils.combine(Collections.singletonList(resolved), af.parameters()); + // update the reference so Count can pick it up + af = (AggregateFunction) af.replaceChildren(newChildren); + agg = as.replaceChild(af); + } + } + // handle Count(*) + if (af instanceof Count count) { + var field = af.field(); + if (field.foldable()) { + var fold = field.fold(); + if (fold != null && StringUtils.WILDCARD.equals(fold) == false) { + changed = true; + var source = count.source(); + agg = as.replaceChild(new Count(source, new Literal(source, StringUtils.WILDCARD, DataTypes.KEYWORD))); + } + } + } + } + newAggs.add(agg); + } + return changed ? new Aggregate(aggregate.source(), aggregate.child(), aggregate.groupings(), newAggs) : aggregate; + } + } + + /** + * Replace aggregations that are duplicated inside an Aggregate with an Eval to avoid duplicated compute.
+ * stats a = min(x), b = min(x), c = count(*), d = count() by g + * becomes + * stats a = min(x), c = count(*) by g + * eval b = a, d = c + * keep a, b, c, d, g + */ + static class ReplaceDuplicateAggWithEval extends OptimizerRules.OptimizerRule { + + ReplaceDuplicateAggWithEval() { + super(TransformDirection.UP); + } + + @Override + protected LogicalPlan rule(Aggregate aggregate) { + LogicalPlan plan = aggregate; + + boolean foundDuplicate = false; + var aggs = aggregate.aggregates(); + Map seenAggs = Maps.newMapWithExpectedSize(aggs.size()); + List projections = new ArrayList<>(); + List keptAggs = new ArrayList<>(aggs.size()); + + for (NamedExpression agg : aggs) { + var attr = agg.toAttribute(); + if (agg instanceof Alias as && as.child() instanceof AggregateFunction af) { + var seen = seenAggs.putIfAbsent(af, attr); + if (seen != null) { + foundDuplicate = true; + projections.add(as.replaceChild(seen)); + } + // otherwise keep the agg in place + else { + keptAggs.add(agg); + projections.add(attr); + } + } else { + keptAggs.add(agg); + projections.add(attr); + } + } + + // at least one duplicate found - add the projection (to keep the output in place) + if (foundDuplicate) { + var source = aggregate.source(); + var newAggregate = new Aggregate(source, aggregate.child(), aggregate.groupings(), keptAggs); + plan = new Project(source, newAggregate, projections); + } + + return plan; + } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java index 3f05fa90ac8ab..a8d00920bef79 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizer.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; import org.elasticsearch.xpack.esql.plan.physical.ExchangeExec; import org.elasticsearch.xpack.esql.plan.physical.FragmentExec; +import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.RegexExtractExec; @@ -115,6 +116,9 @@ public PhysicalPlan apply(PhysicalPlan plan) { if (p instanceof RegexExtractExec ree) { attributes.removeAll(ree.extractedFields()); } + if (p instanceof MvExpandExec mvee) { + attributes.remove(mvee.expanded()); + } if (p instanceof EnrichExec ee) { for (NamedExpression enrichField : ee.enrichFields()) { // TODO: why is this different then the remove above? 
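The bookkeeping behind ReplaceDuplicateAggWithEval in the LogicalPlanOptimizer hunk above reduces to a seen-map plus a projection list. Here is a minimal standalone sketch with a hypothetical Agg record in place of the real Alias/AggregateFunction pair:

```java
// Standalone sketch of the dedup bookkeeping in ReplaceDuplicateAggWithEval,
// assuming a hypothetical Agg record instead of the real ESQL expression classes.
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DedupAggsSketch {
    record Agg(String name, String function) {}

    public static void main(String[] args) {
        // stats a = min(x), b = min(x), c = count(*), d = count(*) by g
        List<Agg> aggs = List.of(
            new Agg("a", "min(x)"),
            new Agg("b", "min(x)"),
            new Agg("c", "count(*)"),
            new Agg("d", "count(*)")
        );
        Map<String, String> seen = new LinkedHashMap<>(); // agg expression -> name that first computed it
        List<Agg> kept = new ArrayList<>();
        List<String> projections = new ArrayList<>();
        for (Agg agg : aggs) {
            String first = seen.putIfAbsent(agg.function(), agg.name());
            if (first != null) {
                projections.add(agg.name() + " = " + first); // re-projected; becomes an Eval alias in the real rule
            } else {
                kept.add(agg); // first occurrence is computed once
                projections.add(agg.name());
            }
        }
        System.out.println("kept:        " + kept);
        System.out.println("projections: " + projections); // [a, b = a, c, d = c]
    }
}
```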
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 418e11f248ab7..49caf0e4618bd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -110,7 +110,7 @@ public PlanFactory visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) Map options = visitCommandOptions(ctx.commandOptions()); String appendSeparator = ""; for (Map.Entry item : options.entrySet()) { - if (item.getKey().equals("append_separator") == false) { + if (item.getKey().equalsIgnoreCase("append_separator") == false) { throw new ParsingException(source(ctx), "Invalid option for dissect: [{}]", item.getKey()); } if (item.getValue() instanceof String == false) { @@ -150,7 +150,8 @@ public PlanFactory visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) @Override public PlanFactory visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { String identifier = visitSourceIdentifier(ctx.sourceIdentifier()); - return child -> new MvExpand(source(ctx), child, new UnresolvedAttribute(source(ctx), identifier)); + Source src = source(ctx); + return child -> new MvExpand(src, child, new UnresolvedAttribute(src, identifier), new UnresolvedAttribute(src, identifier)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 6f7830a12c708..17f669b5d30b3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -7,26 +7,50 @@ package org.elasticsearch.xpack.esql.plan.logical; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.ArrayList; +import java.util.List; import java.util.Objects; public class MvExpand extends UnaryPlan { private final NamedExpression target; + private final Attribute expanded; - public MvExpand(Source source, LogicalPlan child, NamedExpression target) { + private final List output; + + public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded) { super(source, child); this.target = target; + this.expanded = expanded; + this.output = calculateOutput(child.output(), target, expanded); + } + + public static List calculateOutput(List input, NamedExpression target, Attribute expanded) { + List result = new ArrayList<>(); + for (Attribute attribute : input) { + if (attribute.name().equals(target.name())) { + result.add(expanded); + } else { + result.add(attribute); + } + } + return result; } public NamedExpression target() { return target; } + public Attribute expanded() { + return expanded; + } + @Override public boolean expressionsResolved() { return target.resolved(); @@ -34,17 +58,22 @@ public boolean expressionsResolved() { @Override public UnaryPlan replaceChild(LogicalPlan newChild) { - return new MvExpand(source(), newChild, target); + return new MvExpand(source(), newChild, target, 
expanded); + } + + @Override + public List output() { + return output; } @Override protected NodeInfo info() { - return NodeInfo.create(this, MvExpand::new, child(), target); + return NodeInfo.create(this, MvExpand::new, child(), target, expanded); } @Override public int hashCode() { - return Objects.hash(super.hashCode(), target); + return Objects.hash(super.hashCode(), target, expanded); } @Override @@ -52,6 +81,6 @@ public boolean equals(Object obj) { if (false == super.equals(obj)) { return false; } - return Objects.equals(target, ((MvExpand) obj).target); + return Objects.equals(target, ((MvExpand) obj).target) && Objects.equals(expanded, ((MvExpand) obj).expanded); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/MvExpandExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/MvExpandExec.java index 4bbd4b8aae2e3..816b6261c0f3d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/MvExpandExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/MvExpandExec.java @@ -6,38 +6,55 @@ */ package org.elasticsearch.xpack.esql.plan.physical; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; +import java.util.List; import java.util.Objects; +import static org.elasticsearch.xpack.esql.plan.logical.MvExpand.calculateOutput; + public class MvExpandExec extends UnaryExec { private final NamedExpression target; + private final Attribute expanded; + private final List output; - public MvExpandExec(Source source, PhysicalPlan child, NamedExpression target) { + public MvExpandExec(Source source, PhysicalPlan child, NamedExpression target, Attribute expanded) { super(source, child); this.target = target; + this.expanded = expanded; + this.output = calculateOutput(child.output(), target, expanded); } @Override protected NodeInfo info() { - return NodeInfo.create(this, MvExpandExec::new, child(), target); + return NodeInfo.create(this, MvExpandExec::new, child(), target, expanded); } @Override public MvExpandExec replaceChild(PhysicalPlan newChild) { - return new MvExpandExec(source(), newChild, target); + return new MvExpandExec(source(), newChild, target, expanded); } public NamedExpression target() { return target; } + public Attribute expanded() { + return expanded; + } + + @Override + public List output() { + return output; + } + @Override public int hashCode() { - return Objects.hash(target, child()); + return Objects.hash(target, child(), expanded); } @Override @@ -51,6 +68,6 @@ public boolean equals(Object obj) { MvExpandExec other = (MvExpandExec) obj; - return Objects.equals(target, other.target) && Objects.equals(child(), other.child()); + return Objects.equals(target, other.target) && Objects.equals(child(), other.child()) && Objects.equals(expanded, other.expanded); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java index 86ad56c115b3a..da81800c09402 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/AggregateMapper.java @@ -225,7 +225,7 @@ static String dataTypeToString(DataType type, Class aggClass) { return 
"Long"; } else if (type.equals(DataTypes.DOUBLE)) { return "Double"; - } else if (type.equals(DataTypes.KEYWORD) || type.equals(DataTypes.IP)) { + } else if (type.equals(DataTypes.KEYWORD) || type.equals(DataTypes.IP) || type.equals(DataTypes.TEXT)) { return "BytesRef"; } else { throw new EsqlIllegalArgumentException("illegal agg type: " + type.typeName()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java index 3131b8c8c1e20..f73ab716cb534 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsPhysicalOperationProviders.java @@ -12,21 +12,19 @@ import org.apache.lucene.search.Query; import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.lucene.BlockReaderFactories; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; -import org.elasticsearch.compute.lucene.ValueSourceInfo; -import org.elasticsearch.compute.lucene.ValueSources; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; +import org.elasticsearch.index.mapper.BlockDocValuesReader; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.search.NestedHelper; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.sort.SortBuilder; @@ -45,13 +43,11 @@ import java.util.ArrayList; import java.util.List; import java.util.function.Function; -import java.util.function.Supplier; import static org.elasticsearch.common.lucene.search.Queries.newNonNestedFilter; import static org.elasticsearch.compute.lucene.LuceneSourceOperator.NO_LIMIT; public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProviders { - private static final Logger logger = LogManager.getLogger(EsPhysicalOperationProviders.class); private final List searchContexts; @@ -79,16 +75,18 @@ public final PhysicalOperation fieldExtractPhysicalOperation(FieldExtractExec fi DataType dataType = attr.dataType(); String fieldName = attr.name(); - Supplier> sources = () -> ValueSources.sources( + List factories = BlockReaderFactories.factories( searchContexts, fieldName, - EsqlDataTypes.isUnsupported(dataType), - LocalExecutionPlanner.toElementType(dataType) + EsqlDataTypes.isUnsupported(dataType) ); int docChannel = previousLayout.get(sourceAttr.id()).channel(); - op = op.with(new ValuesSourceReaderOperator.ValuesSourceReaderOperatorFactory(sources, docChannel, fieldName), layout.build()); + op = op.with( + new ValuesSourceReaderOperator.ValuesSourceReaderOperatorFactory(factories, docChannel, fieldName), + layout.build() + ); } return op; } @@ -138,8 +136,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec, 
                 luceneFactory = new LuceneTopNSourceOperator.Factory(
                     searchContexts,
                     querySupplier,
-                    context.dataPartitioning(),
-                    context.taskConcurrency(),
+                    context.queryPragmas().dataPartitioning(),
+                    context.queryPragmas().taskConcurrency(),
                     context.pageSize(rowEstimatedSize),
                     limit,
                     fieldSorts
@@ -148,8 +146,8 @@ public final PhysicalOperation sourcePhysicalOperation(EsQueryExec esQueryExec,
             luceneFactory = new LuceneSourceOperator.Factory(
                 searchContexts,
                 querySupplier,
-                context.dataPartitioning(),
-                context.taskConcurrency(),
+                context.queryPragmas().dataPartitioning(),
+                context.queryPragmas().taskConcurrency(),
                 context.pageSize(rowEstimatedSize),
                 limit
             );
@@ -175,12 +173,8 @@ public final Operator.OperatorFactory ordinalGroupingOperatorFactory(
         // The grouping-by values are ready, let's group on them directly.
         // Costin: why are they ready and not already exposed in the layout?
         return new OrdinalsGroupingOperator.OrdinalsGroupingOperatorFactory(
-            () -> ValueSources.sources(
-                searchContexts,
-                attrSource.name(),
-                EsqlDataTypes.isUnsupported(attrSource.dataType()),
-                LocalExecutionPlanner.toElementType(attrSource.dataType())
-            ),
+            BlockReaderFactories.factories(searchContexts, attrSource.name(), EsqlDataTypes.isUnsupported(attrSource.dataType())),
+            groupElementType,
             docChannel,
             attrSource.name(),
             aggregatorFactories,
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
index 72a1a62707438..963b92c048382 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.ElementType;
 import org.elasticsearch.compute.data.Page;
-import org.elasticsearch.compute.lucene.DataPartitioning;
 import org.elasticsearch.compute.lucene.LuceneCountOperator;
 import org.elasticsearch.compute.lucene.LuceneOperator;
 import org.elasticsearch.compute.operator.ColumnExtractOperator;
@@ -76,6 +75,7 @@
 import org.elasticsearch.xpack.esql.plan.physical.RowExec;
 import org.elasticsearch.xpack.esql.plan.physical.ShowExec;
 import org.elasticsearch.xpack.esql.plan.physical.TopNExec;
+import org.elasticsearch.xpack.esql.plugin.QueryPragmas;
 import org.elasticsearch.xpack.esql.session.EsqlConfiguration;
 import org.elasticsearch.xpack.ql.expression.Alias;
 import org.elasticsearch.xpack.ql.expression.Attribute;
@@ -151,9 +151,7 @@ public LocalExecutionPlan plan(PhysicalPlan node) {
         var context = new LocalExecutionPlannerContext(
             new ArrayList<>(),
             new Holder<>(DriverParallelism.SINGLE),
-            configuration.pragmas().taskConcurrency(),
-            configuration.pragmas().dataPartitioning(),
-            configuration.pragmas().pageSize(),
+            configuration.pragmas(),
             bigArrays,
             blockFactory
         );
@@ -256,8 +254,8 @@ private PhysicalOperation planEsStats(EsStatsQueryExec statsQuery, LocalExecutio
         final LuceneOperator.Factory luceneFactory = new LuceneCountOperator.Factory(
             esProvider.searchContexts(),
             querySupplier,
-            context.dataPartitioning(),
-            context.taskConcurrency(),
+            context.queryPragmas.dataPartitioning(),
+            context.queryPragmas.taskConcurrency(),
             limit
         );
@@ -529,7 +527,7 @@ private PhysicalOperation planEnrich(EnrichExec enrich, LocalExecutionPlannerCon
             new EnrichLookupOperator.Factory(
                 sessionId,
                 parentTask,
-                1, // TODO: Add a concurrent setting for enrich - also support unordered mode
+                context.queryPragmas().enrichMaxWorkers(),
                 source.layout.get(enrich.matchField().id()).channel(),
                 enrichLookupService,
                 enrichIndex,
@@ -612,8 +610,24 @@ private PhysicalOperation planLimit(LimitExec limit, LocalExecutionPlannerContex
     private PhysicalOperation planMvExpand(MvExpandExec mvExpandExec, LocalExecutionPlannerContext context) {
         PhysicalOperation source = plan(mvExpandExec.child(), context);
+        List<Attribute> childOutput = mvExpandExec.child().output();
         int blockSize = 5000;// TODO estimate row size and use context.pageSize()
-        return source.with(new MvExpandOperator.Factory(source.layout.get(mvExpandExec.target().id()).channel(), blockSize), source.layout);
+
+        Layout.Builder layout = new Layout.Builder();
+        List inverse = source.layout.inverse();
+        var expandedName = mvExpandExec.expanded().name();
+        for (int index = 0; index < inverse.size(); index++) {
+            if (childOutput.get(index).name().equals(expandedName)) {
+                layout.append(mvExpandExec.expanded());
+            } else {
+                layout.append(inverse.get(index));
+            }
+        }
+
+        return source.with(
+            new MvExpandOperator.Factory(source.layout.get(mvExpandExec.target().id()).channel(), blockSize),
+            layout.build()
+        );
     }
 
     /**
@@ -715,9 +729,7 @@ enum Type {
     public record LocalExecutionPlannerContext(
         List<DriverFactory> driverFactories,
         Holder<DriverParallelism> driverParallelism,
-        int taskConcurrency,
-        DataPartitioning dataPartitioning,
-        int configuredPageSize,
+        QueryPragmas queryPragmas,
         BigArrays bigArrays,
         BlockFactory blockFactory
     ) {
@@ -736,8 +748,8 @@ int pageSize(Integer estimatedRowSize) {
             if (estimatedRowSize == 0) {
                 throw new IllegalStateException("estimated row size can't be 0");
             }
-            if (configuredPageSize != 0) {
-                return configuredPageSize;
+            if (queryPragmas.pageSize() != 0) {
+                return queryPragmas.pageSize();
             }
             return Math.max(SourceOperator.MIN_TARGET_PAGE_SIZE, SourceOperator.TARGET_PAGE_SIZE / estimatedRowSize);
         }
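The pageSize() accessor kept above still drives operator page sizing; only the source of the configured value changed (it now reads the pragma directly). A minimal, standalone sketch of that sizing rule follows; the two constants are placeholders I chose for illustration, not the real values from SourceOperator:

    // Hedged sketch of LocalExecutionPlannerContext.pageSize(); constants are assumed.
    class PageSizeSketch {
        static final int MIN_TARGET_PAGE_SIZE = 32;     // assumption for illustration
        static final int TARGET_PAGE_SIZE = 16 * 1024;  // assumption for illustration

        static int pageSize(int pragmaPageSize, int estimatedRowSize) {
            if (estimatedRowSize == 0) {
                throw new IllegalStateException("estimated row size can't be 0");
            }
            if (pragmaPageSize != 0) {
                return pragmaPageSize;                  // an explicit pragma wins
            }
            // wider rows -> fewer rows per page, but never below the minimum
            return Math.max(MIN_TARGET_PAGE_SIZE, TARGET_PAGE_SIZE / estimatedRowSize);
        }

        public static void main(String[] args) {
            System.out.println(pageSize(0, 8));    // 2048: budget divided by narrow rows
            System.out.println(pageSize(0, 4096)); // 32: the floor kicks in for huge rows
            System.out.println(pageSize(512, 8));  // 512: pragma overrides the estimate
        }
    }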
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
index eb50a1ceb4071..3eea84b0bd1f9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/Mapper.java
@@ -151,7 +151,7 @@ private PhysicalPlan map(UnaryPlan p, PhysicalPlan child) {
         }
 
         if (p instanceof MvExpand mvExpand) {
-            return new MvExpandExec(mvExpand.source(), map(mvExpand.child()), mvExpand.target());
+            return new MvExpandExec(mvExpand.source(), map(mvExpand.child()), mvExpand.target(), mvExpand.expanded());
         }
 
         //
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
index fba2f00f0b314..4c05a70e607f9 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java
@@ -8,11 +8,8 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionResponse;
-import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
-import org.elasticsearch.cluster.routing.allocation.AllocationService;
-import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.breaker.CircuitBreaker;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.settings.ClusterSettings;
@@ -33,21 +30,13 @@
 import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator;
 import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator;
 import org.elasticsearch.compute.operator.topn.TopNOperatorStatus;
-import org.elasticsearch.env.Environment;
-import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.repositories.RepositoriesService;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
-import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.telemetry.TelemetryProvider;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.threadpool.FixedExecutorBuilder;
 import org.elasticsearch.threadpool.ThreadPool;
-import org.elasticsearch.watcher.ResourceWatcherService;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction;
 import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction;
 import org.elasticsearch.xpack.esql.EsqlInfoTransportAction;
@@ -88,28 +77,25 @@ public class EsqlPlugin extends Plugin implements ActionPlugin {
     );
 
     @Override
-    public Collection<Object> createComponents(
-        Client client,
-        ClusterService clusterService,
-        ThreadPool threadPool,
-        ResourceWatcherService resourceWatcherService,
-        ScriptService scriptService,
-        NamedXContentRegistry xContentRegistry,
-        Environment environment,
-        NodeEnvironment nodeEnvironment,
-        NamedWriteableRegistry namedWriteableRegistry,
-        IndexNameExpressionResolver expressionResolver,
-        Supplier<RepositoriesService> repositoriesServiceSupplier,
-        TelemetryProvider telemetryProvider,
-        AllocationService allocationService,
-        IndicesService indicesService
-    ) {
-        CircuitBreaker circuitBreaker = indicesService.getBigArrays().breakerService().getBreaker("request");
+    public Collection<?> createComponents(PluginServices services) {
+        CircuitBreaker circuitBreaker = services.indicesService().getBigArrays().breakerService().getBreaker("request");
         Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set");
-        BlockFactory blockFactory = new BlockFactory(circuitBreaker, indicesService.getBigArrays().withCircuitBreaking());
+        BlockFactory blockFactory = new BlockFactory(circuitBreaker, services.indicesService().getBigArrays().withCircuitBreaking());
         return List.of(
-            new PlanExecutor(new IndexResolver(client, clusterService.getClusterName().value(), EsqlDataTypeRegistry.INSTANCE, Set::of)),
-            new ExchangeService(clusterService.getSettings(), threadPool, EsqlPlugin.ESQL_THREAD_POOL_NAME, blockFactory),
+            new PlanExecutor(
+                new IndexResolver(
+                    services.client(),
+                    services.clusterService().getClusterName().value(),
+                    EsqlDataTypeRegistry.INSTANCE,
+                    Set::of
+                )
+            ),
+            new ExchangeService(
+                services.clusterService().getSettings(),
+                services.threadPool(),
+                EsqlPlugin.ESQL_THREAD_POOL_NAME,
+                blockFactory
+            ),
             blockFactory
         );
     }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java
index 602e04ff08f6c..65a07c98af29a 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/QueryPragmas.java
@@ -28,6 +28,8 @@ public final class QueryPragmas implements Writeable {
     public static final Setting<Integer> EXCHANGE_BUFFER_SIZE = Setting.intSetting("exchange_buffer_size", 10);
     public static final Setting<Integer> EXCHANGE_CONCURRENT_CLIENTS = Setting.intSetting("exchange_concurrent_clients", 3);
 
+    public static final Setting<Integer> ENRICH_MAX_WORKERS = Setting.intSetting("enrich_max_workers", 1);
+
     private static final Setting<Integer> TASK_CONCURRENCY = Setting.intSetting(
         "task_concurrency",
         ThreadPool.searchOrGetThreadPoolSize(EsExecutors.allocatedProcessors(Settings.EMPTY))
@@ -104,6 +106,14 @@ public TimeValue statusInterval() {
         return STATUS_INTERVAL.get(settings);
     }
 
+    /**
+     * Returns the maximum number of workers for enrich lookup. A higher number of workers reduces latency but increases cluster load.
+     * Defaults to 1.
+     */
+    public int enrichMaxWorkers() {
+        return ENRICH_MAX_WORKERS.get(settings);
+    }
+
     public boolean isEmpty() {
         return settings.isEmpty();
     }
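For reviewers unfamiliar with pragmas: the new setting resolves like any other ESQL pragma, falling back to its declared default when a request doesn't set it. A small illustration (not part of the change) of how the Setting API returns the per-request value:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.esql.plugin.QueryPragmas;

    class EnrichWorkersDemo {
        public static void main(String[] args) {
            // Per-request pragmas are just a Settings bag; the Setting does the rest.
            Settings pragmas = Settings.builder().put("enrich_max_workers", 4).build();
            System.out.println(QueryPragmas.ENRICH_MAX_WORKERS.get(pragmas));        // 4
            System.out.println(QueryPragmas.ENRICH_MAX_WORKERS.get(Settings.EMPTY)); // 1, the default
        }
    }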
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
index 0004f085eec56..d1a073f64fe81 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlSession.java
@@ -54,7 +54,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiFunction;
 import java.util.stream.Collectors;
 
@@ -151,22 +150,33 @@ private void preAnalyze(LogicalPlan parsed, BiFunction
         Set<String> policyNames = new HashSet<>(preAnalysis.policyNames);
         EnrichResolution resolution = new EnrichResolution(ConcurrentCollections.newConcurrentSet(), enrichPolicyResolver.allPolicyNames());
-        AtomicReference<IndexResolution> resolvedIndex = new AtomicReference<>();
+
         ActionListener<Void> groupedListener = listener.delegateFailureAndWrap((l, unused) -> {
             assert resolution.resolvedPolicies().size() == policyNames.size()
                 : resolution.resolvedPolicies().size() + " != " + policyNames.size();
-            assert resolvedIndex.get() != null : "index wasn't resolved";
-            l.onResponse(action.apply(resolvedIndex.get(), resolution));
+
+            // first we need the match_fields names from enrich policies and THEN, with an updated list of fields, we call field_caps API
+            var matchFields = resolution.resolvedPolicies()
+                .stream()
+                .filter(p -> p.index().isValid()) // only if the policy by the specified name was found; later the Verifier will be
+                // triggered
+                .map(p -> p.policy().getMatchField())
+                .collect(Collectors.toSet());
+
+            preAnalyzeIndices(
+                parsed,
+                ActionListener.wrap(indexResolution -> l.onResponse(action.apply(indexResolution, resolution)), listener::onFailure),
+                matchFields
+            );
         });
         try (RefCountingListener refs = new RefCountingListener(groupedListener)) {
-            preAnalyzeIndices(parsed, refs.acquire(resolvedIndex::set));
             for (String policyName : policyNames) {
                 enrichPolicyResolver.resolvePolicy(policyName, refs.acquire(resolution.resolvedPolicies()::add));
             }
         }
     }
 
-    private void preAnalyzeIndices(LogicalPlan parsed, ActionListener<IndexResolution> listener) {
+    private void preAnalyzeIndices(LogicalPlan parsed, ActionListener<IndexResolution> listener, Set<String> enrichPolicyMatchFields) {
         PreAnalyzer.PreAnalysis preAnalysis = new PreAnalyzer().preAnalyze(parsed);
         // TODO we plan to support joins in the future when possible, but for now we'll just fail early if we see one
         if (preAnalysis.indices.size() > 1) {
@@ -176,6 +186,11 @@ private void preAnalyzeIndices(LogicalPlan parsed, ActionListener
     static Set<String> fieldNames(LogicalPlan parsed) {
         if (fieldNames.isEmpty()) {
             return IndexResolver.ALL_FIELDS;
         } else {
-            fieldNames.addAll(
-                fieldNames.stream().filter(name -> name.endsWith(WILDCARD) == false).map(name -> name + ".*").collect(Collectors.toSet())
-            );
+            fieldNames.addAll(subfields(fieldNames));
             return fieldNames;
         }
     }
@@ -269,6 +282,10 @@ private static boolean matchByName(Attribute attr, String other, boolean skipIfP
         return isPattern ? Regex.simpleMatch(attr.qualifiedName(), other) : attr.qualifiedName().equals(other);
     }
 
+    private static Set<String> subfields(Set<String> names) {
+        return names.stream().filter(name -> name.endsWith(WILDCARD) == false).map(name -> name + ".*").collect(Collectors.toSet());
+    }
+
     public void optimizedPlan(LogicalPlan logicalPlan, ActionListener<LogicalPlan> listener) {
         analyzedPlan(logicalPlan, map(listener, p -> {
             var plan = logicalPlanOptimizer.optimize(p);
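The extracted subfields() helper keeps field_caps requests complete once enrich match fields join the explicit field list: every concrete name also pulls its dotted subfields. A hedged, self-contained sketch of the same transformation (the WILDCARD constant is assumed to match the one the session uses):

    import java.util.Set;
    import java.util.stream.Collectors;

    class SubfieldsSketch {
        static final String WILDCARD = "*"; // assumption: same constant as in EsqlSession

        static Set<String> subfields(Set<String> names) {
            return names.stream().filter(name -> name.endsWith(WILDCARD) == false).map(name -> name + ".*").collect(Collectors.toSet());
        }

        public static void main(String[] args) {
            // Concrete names gain a ".*" companion (e.g. multi-fields); patterns are skipped.
            System.out.println(subfields(Set.of("emp_no", "name.*"))); // [emp_no.*]
        }
    }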
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
index b0b7bf17d2ab4..13581710f7c53 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java
@@ -52,6 +52,7 @@
 import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext;
+import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext;
 import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer;
@@ -141,6 +142,7 @@ public class CsvTests extends ESTestCase {
 
     private static final Logger LOGGER = LogManager.getLogger(CsvTests.class);
+    private static final String IGNORED_CSV_FILE_NAMES_PATTERN = "-IT_tests_only";
 
     private final String fileName;
     private final String groupName;
@@ -153,14 +155,15 @@ public class CsvTests extends ESTestCase {
     );
     private final FunctionRegistry functionRegistry = new EsqlFunctionRegistry();
     private final EsqlParser parser = new EsqlParser();
-    private final LogicalPlanOptimizer logicalPlanOptimizer = new LogicalPlanOptimizer();
     private final Mapper mapper = new Mapper(functionRegistry);
     private final PhysicalPlanOptimizer physicalPlanOptimizer = new TestPhysicalPlanOptimizer(new PhysicalOptimizerContext(configuration));
     private ThreadPool threadPool;
 
     @ParametersFactory(argumentFormatting = "%2$s.%3$s")
     public static List<Object[]> readScriptSpec() throws Exception {
-        List<URL> urls = classpathResources("/*.csv-spec").stream().filter(x -> x.toString().contains("-ignoreCsvTests") == false).toList();
+        List<URL> urls = classpathResources("/*.csv-spec").stream()
+            .filter(x -> x.toString().contains(IGNORED_CSV_FILE_NAMES_PATTERN) == false)
+            .toList();
         assertTrue("Not enough specs found " + urls, urls.size() > 0);
         return SpecReader.readScriptSpec(urls, specParser());
     }
@@ -231,13 +234,16 @@ public boolean logResults() {
     private void doTest() throws Exception {
         BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking();
         var actualResults = executePlan(bigArrays);
-        var expected = loadCsvSpecValues(testCase.expectedResults);
+        try {
+            var expected = loadCsvSpecValues(testCase.expectedResults);
 
-        var log = logResults() ? LOGGER : null;
-        assertResults(expected, actualResults, testCase.ignoreOrder, log);
-        assertWarnings(actualResults.responseHeaders().getOrDefault("Warning", List.of()));
-        Releasables.close(() -> Iterators.map(actualResults.pages().iterator(), p -> p::releaseBlocks));
-        assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L));
+            var log = logResults() ? LOGGER : null;
+            assertResults(expected, actualResults, testCase.ignoreOrder, log);
+            assertWarnings(actualResults.responseHeaders().getOrDefault("Warning", List.of()));
+        } finally {
+            Releasables.close(() -> Iterators.map(actualResults.pages().iterator(), p -> p::releaseBlocks));
+            assertThat(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST).getUsed(), equalTo(0L));
+        }
     }
 
     protected void assertResults(ExpectedResults expected, ActualResults actual, boolean ignoreOrder, Logger logger) {
@@ -286,7 +292,7 @@ private PhysicalPlan physicalPlan(LogicalPlan parsed, CsvTestsDataLoader.TestsDa
         var enrichPolicies = loadEnrichPolicies();
         var analyzer = new Analyzer(new AnalyzerContext(configuration, functionRegistry, indexResolution, enrichPolicies), TEST_VERIFIER);
         var analyzed = analyzer.analyze(parsed);
-        var logicalOptimized = logicalPlanOptimizer.optimize(analyzed);
+        var logicalOptimized = new LogicalPlanOptimizer(new LogicalOptimizerContext(configuration)).optimize(analyzed);
         var physicalPlan = mapper.map(logicalOptimized);
         var optimizedPlan = EstimatesRowSize.estimateRowSize(0, physicalPlanOptimizer.optimize(physicalPlan));
         opportunisticallyAssertPlanSerialization(physicalPlan, optimizedPlan); // comment out to disable serialization
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
index 0349a1874415b..6e75eea75f655 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java
@@ -1031,27 +1031,6 @@ public void testDateTruncWithNumericInterval() {
             """, "second argument of [date_trunc(1, date)] must be [dateperiod or timeduration], found value [1] type [integer]");
     }
 
-    public void testDateExtractWithSwappedArguments() {
-        verifyUnsupported("""
-            from test
-            | eval date_extract(date, "year")
-            """, "function definition has been updated, please swap arguments in [date_extract(date, \"year\")]");
-    }
-
-    public void testDateFormatWithSwappedArguments() {
-        verifyUnsupported("""
-            from test
-            | eval date_format(date, "yyyy-MM-dd")
-            """, "function definition has been updated, please swap arguments in [date_format(date, \"yyyy-MM-dd\")]");
-    }
-
-    public void testDateTruncWithSwappedArguments() {
-        verifyUnsupported("""
-            from test
-            | eval date_trunc(date, 1 month)
-            """, "function definition has been updated, please swap arguments in [date_trunc(date, 1 month)]");
-    }
-
     public void testDateTruncWithDateInterval() {
         verifyUnsupported("""
             from test
@@ -1295,7 +1274,7 @@ public void testEnrichWrongMatchFieldType() {
             """));
         assertThat(
             e.getMessage(),
-            containsString("Unsupported type [INTEGER] for enrich matching field [languages]; only KEYWORD allowed")
+            containsString("Unsupported type [INTEGER] for enrich matching field [languages]; only KEYWORD allowed")
         );
     }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
index 3a6479215f479..a4c5638ae815f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java
@@ -61,12 +61,14 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
+import java.util.TreeSet;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -107,7 +109,7 @@ public static Literal randomLiteral(DataType type) {
             case "ip" -> new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean())));
             case "time_duration" -> Duration.ofMillis(randomLongBetween(-604800000L, 604800000L)); // plus/minus 7 days
             case "text" -> new BytesRef(randomAlphaOfLength(50));
-            case "version" -> new Version(randomIdentifier()).toBytesRef();
+            case "version" -> randomVersion().toBytesRef();
             case "null" -> null;
             default -> throw new IllegalArgumentException("can't make random values for [" + type.typeName() + "]");
         }, type);
@@ -220,7 +222,9 @@ private void testEvaluate(boolean readFloating) {
             }
         }
         assertThat(result, not(equalTo(Double.NaN)));
+        assert testCase.getMatcher().matches(Double.POSITIVE_INFINITY) == false;
         assertThat(result, not(equalTo(Double.POSITIVE_INFINITY)));
+        assert testCase.getMatcher().matches(Double.NEGATIVE_INFINITY) == false;
         assertThat(result, not(equalTo(Double.NEGATIVE_INFINITY)));
         assertThat(result, testCase.getMatcher());
         if (testCase.getExpectedWarnings() != null) {
@@ -293,6 +297,7 @@ public final void testCrankyEvaluateBlockWithoutNulls() {
      * input pattern contained only a single value.
      *
      */
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100820")
     public final void testCrankyEvaluateBlockWithoutNullsFloating() {
         assumeTrue("sometimes the cranky breaker silences warnings, just skip these cases", testCase.getExpectedWarnings() == null);
         try {
@@ -454,12 +459,19 @@ public final void testEvaluateInManyThreads() throws ExecutionException, Interru
     public final void testEvaluatorToString() {
         assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null);
         assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable());
-        var supplier = evaluator(buildFieldExpression(testCase));
-        try (ExpressionEvaluator ev = supplier.get(driverContext())) {
+        var factory = evaluator(buildFieldExpression(testCase));
+        try (ExpressionEvaluator ev = factory.get(driverContext())) {
             assertThat(ev.toString(), equalTo(testCase.evaluatorToString));
         }
     }
 
+    public final void testFactoryToString() {
+        assumeTrue("nothing to do if a type error", testCase.getExpectedTypeError() == null);
+        assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable());
+        var factory = evaluator(buildFieldExpression(testCase));
+        assertThat(factory.toString(), equalTo(testCase.evaluatorToString));
+    }
+
     public final void testFold() {
         Expression expression = buildLiteralExpression(testCase);
         if (testCase.getExpectedTypeError() != null) {
@@ -506,17 +518,20 @@ public static void testFunctionInfo() {
         }
 
         for (int i = 0; i < args.size(); i++) {
-            Set<String> annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toSet());
+            Set<String> annotationTypes = Arrays.stream(args.get(i).type()).collect(Collectors.toCollection(() -> new TreeSet<>()));
             if (annotationTypes.equals(Set.of("?"))) {
                 continue; // TODO remove this eventually, so that all the functions will have to provide signature info
             }
             Set<String> signatureTypes = typesFromSignature.get(i);
+            if (signatureTypes.isEmpty()) {
+                continue;
+            }
             assertEquals(annotationTypes, signatureTypes);
         }
 
-        Set<String> returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toSet());
-        if (returnTypes.equals(Set.of("?")) == false) { // TODO remove this eventually, so that all the functions will have to provide
-            // singature info
+        Set<String> returnTypes = Arrays.stream(description.returnType()).collect(Collectors.toCollection(() -> new TreeSet<>()));
+        if (returnTypes.equals(Set.of("?")) == false) {
+            // TODO remove this eventually, so that all the functions will have to provide signature info
             assertEquals(returnTypes, returnFromSignature);
         }
     }
@@ -534,11 +549,7 @@ public static void testFunctionInfo() {
      * on input types like {@link Greatest} or {@link Coalesce}.
      */
    protected static List<TestCaseSupplier> anyNullIsNull(boolean entirelyNullPreservesType, List<TestCaseSupplier> testCaseSuppliers) {
-        for (TestCaseSupplier s : testCaseSuppliers) {
-            if (s.types() == null) {
-                throw new IllegalArgumentException("types required");
-            }
-        }
+        typesRequired(testCaseSuppliers);
         List<TestCaseSupplier> suppliers = new ArrayList<>(testCaseSuppliers.size());
         suppliers.addAll(testCaseSuppliers);
@@ -613,11 +624,7 @@
      * that they throw type errors.
      */
     protected static List<TestCaseSupplier> errorsForCasesWithoutExamples(List<TestCaseSupplier> testCaseSuppliers) {
-        for (TestCaseSupplier s : testCaseSuppliers) {
-            if (s.types() == null) {
-                throw new IllegalArgumentException("types required");
-            }
-        }
+        typesRequired(testCaseSuppliers);
         List<TestCaseSupplier> suppliers = new ArrayList<>(testCaseSuppliers.size());
         suppliers.addAll(testCaseSuppliers);
@@ -642,6 +649,13 @@ protected static List errorsForCasesWithoutExamples(List
+    private static void typesRequired(List<TestCaseSupplier> suppliers) {
+        String bad = suppliers.stream().filter(s -> s.types() == null).map(s -> s.name()).collect(Collectors.joining("\n"));
+        if (bad.equals("") == false) {
+            throw new IllegalArgumentException("types required but not found for these tests:\n" + bad);
+        }
+    }
+
     private static List<Set<DataType>> validPerPosition(Set<List<DataType>> valid) {
         int max = valid.stream().mapToInt(List::size).max().getAsInt();
         List<Set<DataType>> result = new ArrayList<>(max);
@@ -733,7 +747,9 @@ private static String expectedType(Set validTypes) {
              * don't have a test case covering explicit `null` arguments in
              * this position. Generally you can get that with anyNullIsNull.
              */
-            throw new UnsupportedOperationException("can't guess expected types for " + validTypes);
+            throw new UnsupportedOperationException(
+                "can't guess expected types for " + validTypes.stream().sorted(Comparator.comparing(t -> t.typeName())).toList()
+            );
         }
         return named;
     }
@@ -881,4 +897,14 @@ public void allMemoryReleased() {
             assertThat(breaker.getUsed(), equalTo(0L));
         }
     }
+
+    static Version randomVersion() {
+        // TODO degenerate versions and stuff
+        return switch (between(0, 2)) {
+            case 0 -> new Version(Integer.toString(between(0, 100)));
+            case 1 -> new Version(between(0, 100) + "." + between(0, 100));
+            case 2 -> new Version(between(0, 100) + "." + between(0, 100) + "." + between(0, 100));
+            default -> throw new IllegalArgumentException();
+        };
+    }
 }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
index 8ca09494fb06c..e49776db1edea 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java
@@ -70,7 +70,9 @@ public TestCase get() {
         if (types != null) {
             for (int i = 0; i < types.size(); i++) {
                 if (supplied.getData().get(i).type() != types.get(i)) {
-                    throw new IllegalStateException("supplier/data type mismatch " + supplied.getData().get(i).type() + "/" + types.get(i));
+                    throw new IllegalStateException(
+                        name + ": supplier/data type mismatch " + supplied.getData().get(i).type() + "/" + types.get(i)
+                    );
                 }
             }
         }
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java
index 79089864daa4a..043bf083b580a 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/AutoBucketTests.java
@@ -46,7 +46,36 @@ public static Iterable<Object[]> parameters() {
                 args,
                 "DateTruncEvaluator[fieldVal=Attribute[channel=0], rounding=Rounding[DAY_OF_MONTH in Z][fixed to midnight]]",
                 DataTypes.DATETIME,
-                resultsMatcher(args)
+                dateResultsMatcher(args)
             );
+        }), new TestCaseSupplier("Autobucket Single long", () -> {
+            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100L, DataTypes.LONG, "arg"));
+            return new TestCaseSupplier.TestCase(
+                args,
+                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator["
+                    + "val=DivDoublesEvaluator[lhs=CastLongToDoubleEvaluator[v=Attribute[channel=0]], "
+                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
+                DataTypes.DOUBLE,
+                numericResultsMatcher(args, 100.0)
+            );
+        }), new TestCaseSupplier("Autobucket Single int", () -> {
+            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100, DataTypes.INTEGER, "arg"));
+            return new TestCaseSupplier.TestCase(
+                args,
+                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator["
+                    + "val=DivDoublesEvaluator[lhs=CastIntToDoubleEvaluator[v=Attribute[channel=0]], "
+                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
+                DataTypes.DOUBLE,
+                numericResultsMatcher(args, 100.0)
+            );
+        }), new TestCaseSupplier("Autobucket Single double", () -> {
+            List<TestCaseSupplier.TypedData> args = List.of(new TestCaseSupplier.TypedData(100.0, DataTypes.DOUBLE, "arg"));
+            return new TestCaseSupplier.TestCase(
+                args,
+                "MulDoublesEvaluator[lhs=FloorDoubleEvaluator[val=DivDoublesEvaluator[lhs=Attribute[channel=0], "
+                    + "rhs=LiteralsEvaluator[lit=50.0]]], rhs=LiteralsEvaluator[lit=50.0]]",
+                DataTypes.DOUBLE,
+                numericResultsMatcher(args, 100.0)
+            );
         })));
     }
@@ -72,11 +101,15 @@ protected DataType expectedType(List<DataType> argTypes) {
         return argTypes.get(0);
     }
 
-    private static Matcher<Object> resultsMatcher(List<TestCaseSupplier.TypedData> typedData) {
+    private static Matcher<Object> dateResultsMatcher(List<TestCaseSupplier.TypedData> typedData) {
         long millis = ((Number) typedData.get(0).data()).longValue();
         return equalTo(Rounding.builder(Rounding.DateTimeUnit.DAY_OF_MONTH).build().prepareForUnknown().round(millis));
     }
 
+    private static Matcher<Object> numericResultsMatcher(List<TestCaseSupplier.TypedData> typedData, Object value) {
+        return equalTo(value);
+    }
+
     @Override
     protected List<ArgumentSpec> argSpec() {
         DataType[] numerics = numerics();
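The three new suppliers all expect floor(value / 50.0) * 50.0, which is why a 100 input of any numeric type buckets to 100.0. Spelled out below; the 50.0 bucket size is simply read off the evaluator strings in the test, not derived independently:

    class AutoBucketArithmetic {
        // floor(v / bucketSize) * bucketSize: the operation the evaluator chain encodes
        static double bucket(double v, double bucketSize) {
            return Math.floor(v / bucketSize) * bucketSize;
        }

        public static void main(String[] args) {
            System.out.println(bucket(100.0, 50.0)); // 100.0, hence numericResultsMatcher(args, 100.0)
            System.out.println(bucket(99.0, 50.0));  // 50.0
        }
    }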
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java
index a8744206e91f9..58f56e54c7245 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/PowTests.java
@@ -185,8 +185,8 @@ public static Iterable<Object[]> parameters() {
                 )
             ),
             new TestCaseSupplier("pow(integer, double)", () -> {
-                // Negative numbers to a non-integer power are NaN
-                int base = randomIntBetween(0, 1000);
+                // Positive numbers to a non-integer power
+                int base = randomIntBetween(1, 1000);
                 double exp = randomDoubleBetween(-10.0, 10.0, true);
                 double expected = Math.pow(base, exp);
                 TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase(
@@ -336,7 +336,7 @@ public static Iterable<Object[]> parameters() {
             }),
             new TestCaseSupplier("pow(long, double)", () -> {
                 // Negative numbers to non-integer power are NaN
-                long base = randomLongBetween(0, 1000);
+                long base = randomLongBetween(1, 1000);
                 double exp = randomDoubleBetween(-10.0, 10.0, true);
                 double expected = Math.pow(base, exp);
                 TestCaseSupplier.TestCase testCase = new TestCaseSupplier.TestCase(
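The shift of the random lower bound from 0 to 1 avoids a zero base: zero raised to a negative exponent overflows to positive infinity, which the strengthened assertions in AbstractFunctionTestCase above now reject. A quick demonstration:

    class PowZeroBase {
        public static void main(String[] args) {
            System.out.println(Math.pow(0, -1.5)); // Infinity: the case the old bound allowed
            System.out.println(Math.pow(0, 1.5));  // 0.0
            System.out.println(Math.pow(1, -1.5)); // 1.0: safe with the new lower bound
        }
    }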
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
index 2519b41fdac2e..a87cc379e8c3f 100644
--- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
+++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/AbstractMultivalueFunctionTestCase.java
@@ -25,6 +25,7 @@
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.function.BiFunction;
+import java.util.function.Function;
 import java.util.stream.DoubleStream;
 import java.util.stream.IntStream;
 import java.util.stream.LongStream;
@@ -32,7 +33,7 @@
 public abstract class AbstractMultivalueFunctionTestCase extends AbstractScalarFunctionTestCase {
     /**
-     * Build a test case with {@code boolean} values.
+     * Build many test cases with {@code boolean} values.
      */
     protected static void booleans(
         List<TestCaseSupplier> cases,
@@ -44,7 +45,7 @@ protected static void booleans(
     }
 
     /**
-     * Build a test case with {@code boolean} values.
+     * Build many test cases with {@code boolean} values.
     */
     protected static void booleans(
         List<TestCaseSupplier> cases,
@@ -56,6 +57,7 @@ protected static void booleans(
         cases.add(
             new TestCaseSupplier(
                 name + "(false)",
+                List.of(DataTypes.BOOLEAN),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(new TestCaseSupplier.TypedData(List.of(false), DataTypes.BOOLEAN, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
@@ -67,6 +69,7 @@ protected static void booleans(
         cases.add(
             new TestCaseSupplier(
                 name + "(true)",
+                List.of(DataTypes.BOOLEAN),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(new TestCaseSupplier.TypedData(List.of(true), DataTypes.BOOLEAN, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
@@ -76,7 +79,7 @@ protected static void booleans(
             )
         );
         for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.BOOLEAN), () -> {
                 List<Boolean> mvData = randomList(2, 100, ESTestCase::randomBoolean);
                 putInOrder(mvData, ordering);
                 return new TestCaseSupplier.TestCase(
@@ -90,7 +93,7 @@ protected static void booleans(
     }
 
     /**
-     * Build a test case with {@link BytesRef} values.
+     * Build many test cases with {@link BytesRef} values.
     */
     protected static void bytesRefs(
         List<TestCaseSupplier> cases,
@@ -98,55 +101,60 @@ protected static void bytesRefs(
         String evaluatorName,
         BiFunction<Integer, Stream<BytesRef>, Matcher<Object>> matcher
     ) {
-        bytesRefs(cases, name, evaluatorName, DataTypes.KEYWORD, matcher);
+        bytesRefs(cases, name, evaluatorName, t -> t, matcher);
     }
 
     /**
-     * Build a test case with {@link BytesRef} values.
+     * Build many test cases with {@link BytesRef} values.
     */
     protected static void bytesRefs(
         List<TestCaseSupplier> cases,
         String name,
         String evaluatorName,
-        DataType expectedDataType,
+        Function<DataType, DataType> expectedDataType,
         BiFunction<Integer, Stream<BytesRef>, Matcher<Object>> matcher
     ) {
-        cases.add(
-            new TestCaseSupplier(
-                name + "(empty string)",
-                () -> new TestCaseSupplier.TestCase(
-                    List.of(new TestCaseSupplier.TypedData(List.of(new BytesRef("")), DataTypes.KEYWORD, "field")),
-                    evaluatorName + "[field=Attribute[channel=0]]",
-                    expectedDataType,
-                    matcher.apply(1, Stream.of(new BytesRef("")))
-                )
-            )
-        );
-        cases.add(new TestCaseSupplier(name + "(BytesRef)", () -> {
-            BytesRef data = new BytesRef(randomAlphaOfLength(10));
-            return new TestCaseSupplier.TestCase(
-                List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.KEYWORD, "field")),
-                evaluatorName + "[field=Attribute[channel=0]]",
-                expectedDataType,
-                matcher.apply(1, Stream.of(data))
-            );
-        }));
-        for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
-                List<BytesRef> mvData = randomList(1, 100, () -> new BytesRef(randomAlphaOfLength(10)));
-                putInOrder(mvData, ordering);
+        for (DataType type : new DataType[] { DataTypes.KEYWORD, DataTypes.TEXT, DataTypes.IP, DataTypes.VERSION }) {
+            if (type != DataTypes.IP) {
+                cases.add(
+                    new TestCaseSupplier(
+                        name + "(empty " + type.typeName() + ")",
+                        List.of(type),
+                        () -> new TestCaseSupplier.TestCase(
+                            List.of(new TestCaseSupplier.TypedData(List.of(new BytesRef("")), type, "field")),
+                            evaluatorName + "[field=Attribute[channel=0]]",
+                            expectedDataType.apply(type),
+                            matcher.apply(1, Stream.of(new BytesRef("")))
+                        )
+                    )
+                );
+            }
+            cases.add(new TestCaseSupplier(name + "(" + type.typeName() + ")", List.of(type), () -> {
+                BytesRef data = (BytesRef) randomLiteral(type).value();
                 return new TestCaseSupplier.TestCase(
-                    List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.KEYWORD, "field")),
+                    List.of(new TestCaseSupplier.TypedData(List.of(data), type, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
-                    expectedDataType,
-                    matcher.apply(mvData.size(), mvData.stream())
+                    expectedDataType.apply(type),
+                    matcher.apply(1, Stream.of(data))
                 );
             }));
+            for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
+                cases.add(new TestCaseSupplier(name + "(<" + type.typeName() + "s>) " + ordering, List.of(type), () -> {
+                    List<BytesRef> mvData = randomList(1, 100, () -> (BytesRef) randomLiteral(type).value());
+                    putInOrder(mvData, ordering);
+                    return new TestCaseSupplier.TestCase(
+                        List.of(new TestCaseSupplier.TypedData(mvData, type, "field")),
+                        evaluatorName + "[field=Attribute[channel=0]]",
+                        expectedDataType.apply(type),
+                        matcher.apply(mvData.size(), mvData.stream())
+                    );
+                }));
+            }
         }
     }
 
     /**
-     * Build a test case with {@code double} values.
+     * Build many test cases with {@code double} values.
     */
     protected static void doubles(
         List<TestCaseSupplier> cases,
@@ -158,7 +166,7 @@ protected static void doubles(
     }
 
     /**
-     * Build a test case with {@code double} values.
+     * Build many test cases with {@code double} values.
    */
     protected static void doubles(
         List<TestCaseSupplier> cases,
@@ -170,6 +178,7 @@ protected static void doubles(
         cases.add(
             new TestCaseSupplier(
                 name + "(0.0)",
+                List.of(DataTypes.DOUBLE),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(new TestCaseSupplier.TypedData(List.of(0.0), DataTypes.DOUBLE, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
@@ -178,7 +187,7 @@ protected static void doubles(
                 )
             )
         );
-        cases.add(new TestCaseSupplier(name + "(double)", () -> {
+        cases.add(new TestCaseSupplier(name + "(double)", List.of(DataTypes.DOUBLE), () -> {
             double mvData = randomDouble();
             return new TestCaseSupplier.TestCase(
                 List.of(new TestCaseSupplier.TypedData(List.of(mvData), DataTypes.DOUBLE, "field")),
@@ -188,7 +197,7 @@ protected static void doubles(
             );
         }));
         for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.DOUBLE), () -> {
                 List<Double> mvData = randomList(1, 100, ESTestCase::randomDouble);
                 putInOrder(mvData, ordering);
                 return new TestCaseSupplier.TestCase(
@@ -202,7 +211,7 @@ protected static void doubles(
     }
 
     /**
-     * Build a test case with {@code int} values.
+     * Build many test cases with {@code int} values.
     */
     protected static void ints(
         List<TestCaseSupplier> cases,
@@ -214,7 +223,7 @@ protected static void ints(
     }
 
     /**
-     * Build a test case with {@code int} values.
+     * Build many test cases with {@code int} values.
     */
     protected static void ints(
         List<TestCaseSupplier> cases,
@@ -226,6 +235,7 @@ protected static void ints(
         cases.add(
             new TestCaseSupplier(
                 name + "(0)",
+                List.of(DataTypes.INTEGER),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(new TestCaseSupplier.TypedData(List.of(0), DataTypes.INTEGER, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
@@ -234,7 +244,7 @@ protected static void ints(
                 )
             )
         );
-        cases.add(new TestCaseSupplier(name + "(int)", () -> {
+        cases.add(new TestCaseSupplier(name + "(int)", List.of(DataTypes.INTEGER), () -> {
             int data = randomInt();
             return new TestCaseSupplier.TestCase(
                 List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.INTEGER, "field")),
@@ -244,7 +254,7 @@ protected static void ints(
             );
         }));
         for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.INTEGER), () -> {
                 List<Integer> mvData = randomList(1, 100, ESTestCase::randomInt);
                 putInOrder(mvData, ordering);
                 return new TestCaseSupplier.TestCase(
@@ -258,7 +268,7 @@ protected static void ints(
     }
 
     /**
-     * Build a test case with {@code long} values.
+     * Build many test cases with {@code long} values.
     */
     protected static void longs(
         List<TestCaseSupplier> cases,
@@ -270,7 +280,7 @@ protected static void longs(
     }
 
     /**
-     * Build a test case with {@code long} values.
+     * Build many test cases with {@code long} values.
    */
     protected static void longs(
         List<TestCaseSupplier> cases,
@@ -282,6 +292,7 @@ protected static void longs(
         cases.add(
             new TestCaseSupplier(
                 name + "(0L)",
+                List.of(DataTypes.LONG),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(new TestCaseSupplier.TypedData(List.of(0L), DataTypes.LONG, "field")),
                     evaluatorName + "[field=Attribute[channel=0]]",
@@ -290,7 +301,7 @@ protected static void longs(
                 )
             )
         );
-        cases.add(new TestCaseSupplier(name + "(long)", () -> {
+        cases.add(new TestCaseSupplier(name + "(long)", List.of(DataTypes.LONG), () -> {
             long data = randomLong();
             return new TestCaseSupplier.TestCase(
                 List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.LONG, "field")),
@@ -300,7 +311,7 @@ protected static void longs(
             );
         }));
         for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.LONG), () -> {
                 List<Long> mvData = randomList(1, 100, ESTestCase::randomLong);
                 putInOrder(mvData, ordering);
                 return new TestCaseSupplier.TestCase(
@@ -314,7 +325,64 @@ protected static void longs(
     }
 
     /**
-     * Build a test case with unsigned {@code long} values.
+     * Build many test cases with {@code date} values.
+     */
+    protected static void dateTimes(
+        List<TestCaseSupplier> cases,
+        String name,
+        String evaluatorName,
+        BiFunction<Integer, LongStream, Matcher<Object>> matcher
+    ) {
+        dateTimes(cases, name, evaluatorName, DataTypes.DATETIME, matcher);
+    }
+
+    /**
+     * Build many test cases with {@code date} values.
+     */
+    protected static void dateTimes(
+        List<TestCaseSupplier> cases,
+        String name,
+        String evaluatorName,
+        DataType expectedDataType,
+        BiFunction<Integer, LongStream, Matcher<Object>> matcher
+    ) {
+        cases.add(
+            new TestCaseSupplier(
+                name + "(epoch)",
+                List.of(DataTypes.DATETIME),
+                () -> new TestCaseSupplier.TestCase(
+                    List.of(new TestCaseSupplier.TypedData(List.of(0L), DataTypes.DATETIME, "field")),
+                    evaluatorName + "[field=Attribute[channel=0]]",
+                    expectedDataType,
+                    matcher.apply(1, LongStream.of(0L))
+                )
+            )
+        );
+        cases.add(new TestCaseSupplier(name + "(date)", List.of(DataTypes.DATETIME), () -> {
+            long data = randomLong();
+            return new TestCaseSupplier.TestCase(
+                List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.DATETIME, "field")),
+                evaluatorName + "[field=Attribute[channel=0]]",
+                expectedDataType,
+                matcher.apply(1, LongStream.of(data))
+            );
+        }));
+        for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.DATETIME), () -> {
+                List<Long> mvData = randomList(1, 100, ESTestCase::randomLong);
+                putInOrder(mvData, ordering);
+                return new TestCaseSupplier.TestCase(
+                    List.of(new TestCaseSupplier.TypedData(mvData, DataTypes.DATETIME, "field")),
+                    evaluatorName + "[field=Attribute[channel=0]]",
+                    expectedDataType,
+                    matcher.apply(mvData.size(), mvData.stream().mapToLong(Long::longValue))
+                );
+            }));
+        }
+    }
+
+    /**
+     * Build many test cases with unsigned {@code long} values.
      */
     protected static void unsignedLongs(
         List<TestCaseSupplier> cases,
@@ -326,7 +394,7 @@ protected static void unsignedLongs(
     }
 
     /**
-     * Build a test case with unsigned {@code long} values.
+     * Build many test cases with unsigned {@code long} values.
     */
     protected static void unsignedLongs(
         List<TestCaseSupplier> cases,
@@ -338,6 +406,7 @@ protected static void unsignedLongs(
         cases.add(
             new TestCaseSupplier(
                 name + "(0UL)",
+                List.of(DataTypes.UNSIGNED_LONG),
                 () -> new TestCaseSupplier.TestCase(
                     List.of(
                         new TestCaseSupplier.TypedData(
@@ -352,7 +421,7 @@ protected static void unsignedLongs(
                 )
             )
         );
-        cases.add(new TestCaseSupplier(name + "(unsigned long)", () -> {
+        cases.add(new TestCaseSupplier(name + "(unsigned long)", List.of(DataTypes.UNSIGNED_LONG), () -> {
             long data = randomLong();
             return new TestCaseSupplier.TestCase(
                 List.of(new TestCaseSupplier.TypedData(List.of(data), DataTypes.UNSIGNED_LONG, "field")),
@@ -362,7 +431,7 @@ protected static void unsignedLongs(
             );
         }));
         for (Block.MvOrdering ordering : Block.MvOrdering.values()) {
-            cases.add(new TestCaseSupplier(name + "() " + ordering, () -> {
+            cases.add(new TestCaseSupplier(name + "() " + ordering, List.of(DataTypes.UNSIGNED_LONG), () -> {
                List<Long> mvData = randomList(1, 100, ESTestCase::randomLong);
                 putInOrder(mvData, ordering);
                 return new TestCaseSupplier.TestCase(
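The new dateTimes() cases model dates as epoch-millis longs, so expected values for the multivalue functions below reduce to plain long math. For instance (the millis values here are arbitrary examples, not from the tests):

    import java.util.List;

    class DateTimesExpectation {
        public static void main(String[] args) {
            // mv_max over dates is just max over the underlying longs
            List<Long> mvData = List.of(0L, 1_696_518_000_000L);
            long expected = mvData.stream().mapToLong(Long::longValue).max().getAsLong();
            System.out.println(expected); // 1696518000000
        }
    }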
"mv_count", "MvCount", DataTypes.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, cases))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index 713ae263705d3..375a7a769ccfd 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -38,6 +38,7 @@ public static Iterable parameters() { List cases = new ArrayList<>(); booleans(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); bytesRefs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); + dateTimes(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); doubles(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Double::valueOf))); ints(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Integer::valueOf))); longs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values.mapToObj(Long::valueOf))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java index 556cedf259a86..8f7292adb86a4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java @@ -43,7 +43,8 @@ public static Iterable parameters() { "MvMax", (size, values) -> equalTo(NumericUtils.asLongUnsigned(values.reduce(BigInteger::max).get())) ); - return parameterSuppliersFromTypedData(cases); + dateTimes(cases, "mv_max", "MvMax", (size, values) -> equalTo(values.max().getAsLong())); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java index 047dc4fe64641..ce83c4bb8f786 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java @@ -73,6 +73,7 @@ public static Iterable parameters() { cases.add( new TestCaseSupplier( "mv_median(<1, 2>)", + List.of(DataTypes.INTEGER), () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(1, 2), DataTypes.INTEGER, "field")), "MvMedian[field=Attribute[channel=0]]", @@ -84,6 +85,7 @@ public static Iterable parameters() { cases.add( new TestCaseSupplier( "mv_median(<-1, -2>)", + List.of(DataTypes.INTEGER), () -> new TestCaseSupplier.TestCase( List.of(new TestCaseSupplier.TypedData(List.of(-1, -2), DataTypes.INTEGER, "field")), "MvMedian[field=Attribute[channel=0]]", @@ -92,7 +94,7 @@ public 
static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedData(cases); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java index c1dd713e6639c..750d5d4cb89ce 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java @@ -43,7 +43,8 @@ public static Iterable parameters() { "MvMin", (size, values) -> equalTo(NumericUtils.asLongUnsigned(values.reduce(BigInteger::min).get())) ); - return parameterSuppliersFromTypedData(cases); + dateTimes(cases, "mv_min", "MvMin", (size, values) -> equalTo(values.min().getAsLong())); + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(false, cases))); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java index 5d201ad22371c..85612427a1867 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/io/stream/PlanNamedTypesTests.java @@ -49,7 +49,9 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -157,12 +159,14 @@ public void testPhysicalPlanEntries() { public static final List> LOGICAL_PLAN_NODE_CLS = List.of( Aggregate.class, Dissect.class, + Enrich.class, EsRelation.class, + EsqlProject.class, Eval.class, - Enrich.class, Filter.class, Grok.class, Limit.class, + MvExpand.class, OrderBy.class, Project.class, TopN.class @@ -175,6 +179,7 @@ public void testLogicalPlanEntries() { .stream() .filter(e -> e.categoryClass().isAssignableFrom(LogicalPlan.class)) .map(PlanNameRegistry.Entry::name) + .sorted() .toList(); assertThat(actual, equalTo(expected)); } @@ -459,6 +464,38 @@ public void testDissectParserSimple() throws IOException { EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); } + public void testEsRelation() throws IOException { + var orig = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomBoolean()); + BytesStreamOutput bso = new BytesStreamOutput(); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanNamedTypes.writeEsRelation(out, orig); + var deser = PlanNamedTypes.readEsRelation(planStreamInput(bso)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); + } + + public void testEsqlProject() throws IOException { + var orig = new EsqlProject( + Source.EMPTY, + new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), 
randomBoolean()), + List.of(randomFieldAttribute()) + ); + BytesStreamOutput bso = new BytesStreamOutput(); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanNamedTypes.writeEsqlProject(out, orig); + var deser = PlanNamedTypes.readEsqlProject(planStreamInput(bso)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); + } + + public void testMvExpand() throws IOException { + var esRelation = new EsRelation(Source.EMPTY, randomEsIndex(), List.of(randomFieldAttribute()), randomBoolean()); + var orig = new MvExpand(Source.EMPTY, esRelation, randomFieldAttribute(), randomFieldAttribute()); + BytesStreamOutput bso = new BytesStreamOutput(); + PlanStreamOutput out = new PlanStreamOutput(bso, planNameRegistry); + PlanNamedTypes.writeMvExpand(out, orig); + var deser = PlanNamedTypes.readMvExpand(planStreamInput(bso)); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(orig, unused -> deser); + } + private static void assertNamedExpression(NamedExpression origObj) { var deserObj = serializeDeserialize(origObj, PlanStreamOutput::writeExpression, PlanStreamInput::readNamedExpression); EqualsHashCodeTestUtils.checkEqualsAndHashCode(origObj, unused -> deserObj); @@ -474,6 +511,14 @@ private static void assertNamedEsField(EsField origObj) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origObj, unused -> deserObj); } + static EsIndex randomEsIndex() { + return new EsIndex( + randomAlphaOfLength(randomIntBetween(1, 25)), + Map.of(randomAlphaOfLength(randomIntBetween(1, 25)), randomKeywordEsField()), + Set.of(randomAlphaOfLength(randomIntBetween(1, 25)), randomAlphaOfLength(randomIntBetween(1, 25))) + ); + } + static UnsupportedAttribute randomUnsupportedAttribute() { return new UnsupportedAttribute( Source.EMPTY, @@ -626,12 +671,13 @@ static Map randomProperties(int depth) { if (depth > 2) { return Map.of(); // prevent infinite recursion (between EsField and properties) } + depth += 1; int size = randomIntBetween(0, 5); Map map = new HashMap<>(); for (int i = 0; i < size; i++) { map.put( randomAlphaOfLength(randomIntBetween(1, 10)), // name - randomEsField(depth++) + randomEsField(depth) ); } return Map.copyOf(map); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index ae9d7b7d1b5f0..bc46189e13827 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -61,7 +61,7 @@ public static void init() { mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(); + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, EsqlTestUtils.emptyPolicyResolution()), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 511a7ee08b5e1..8b185e013a8a5 100644 --- 
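The randomProperties() fix above replaces a post-increment: randomEsField(depth++) handed each child the old depth and then bumped the loop-local variable once per sibling, so breadth rather than nesting drove the recursion cutoff. A tiny demonstration of the difference:

    class DepthIncrementDemo {
        public static void main(String[] args) {
            int depth = 0;
            for (int i = 0; i < 3; i++) {
                // post-increment: each sibling sees a deeper value than the last
                System.out.println("child sees depth " + depth++); // 0, then 1, then 2
            }
            // With "depth += 1" hoisted above the loop, every sibling sees the same
            // value, one more than its parent, which is what the recursion guard wants.
        }
    }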
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.esql.plan.physical.LimitExec; import org.elasticsearch.xpack.esql.plan.physical.LocalSourceExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.planner.FilterTests; import org.elasticsearch.xpack.esql.planner.Mapper; import org.elasticsearch.xpack.esql.planner.PlannerUtils; @@ -41,6 +42,7 @@ import org.elasticsearch.xpack.esql.stats.Metrics; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.function.FunctionRegistry; import org.elasticsearch.xpack.ql.index.EsIndex; @@ -125,7 +127,7 @@ public void init() { .sum(); EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(); + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); @@ -299,6 +301,16 @@ public void testAnotherCountAllWithFilter() { assertThat(expected.toString(), is(esStatsQuery.query().toString())); } + /** + * Expected + * ProjectExec[[c{r}#3, c{r}#3 AS call, c_literal{r}#7]] + * \_LimitExec[500[INTEGER]] + * \_AggregateExec[[],[COUNT([2a][KEYWORD]) AS c, COUNT(1[INTEGER]) AS c_literal],FINAL,null] + * \_ExchangeExec[[count{r}#18, seen{r}#19, count{r}#20, seen{r}#21],true] + * \_EsStatsQueryExec[test], stats[Stat[name=*, type=COUNT, query=null], Stat[name=*, type=COUNT, query=null]]], + * query[{"esql_single_value":{"field":"emp_no","next":{"range":{"emp_no":{"gt":10010,"boost":1.0}}}}}] + * [count{r}#23, seen{r}#24, count{r}#25, seen{r}#26], limit[], + */ public void testMultiCountAllWithFilter() { var plan = plan(""" from test @@ -306,14 +318,19 @@ public void testMultiCountAllWithFilter() { | stats c = count(), call = count(*), c_literal = count(1) """, IS_SV_STATS); - var limit = as(plan, LimitExec.class); + var project = as(plan, ProjectExec.class); + var projections = project.projections(); + assertThat(Expressions.names(projections), contains("c", "call", "c_literal")); + var alias = as(projections.get(1), Alias.class); + assertThat(Expressions.name(alias.child()), is("c")); + var limit = as(project.child(), LimitExec.class); var agg = as(limit.child(), AggregateExec.class); assertThat(agg.getMode(), is(FINAL)); - assertThat(Expressions.names(agg.aggregates()), contains("c", "call", "c_literal")); + assertThat(Expressions.names(agg.aggregates()), contains("c", "c_literal")); var exchange = as(agg.child(), ExchangeExec.class); var esStatsQuery = as(exchange.child(), EsStatsQueryExec.class); assertThat(esStatsQuery.limit(), is(nullValue())); - assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen", "count", "seen", "count", "seen")); + assertThat(Expressions.names(esStatsQuery.output()), contains("count", "seen", "count", "seen")); var expected = 
wrapWithSingleQuery(QueryBuilders.rangeQuery("emp_no").gt(10010), "emp_no"); assertThat(expected.toString(), is(esStatsQuery.query().toString())); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index a22bb3b91ff0b..f63026c28279a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.aggregate.Percentile; import org.elasticsearch.xpack.esql.expression.function.aggregate.Sum; @@ -43,6 +44,8 @@ import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Grok; +import org.elasticsearch.xpack.esql.plan.logical.MvExpand; +import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.TopN; import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -124,7 +127,7 @@ public static void init() { EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(); + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); EnrichPolicyResolution policy = AnalyzerTestUtils.loadEnrichPolicyResolution( "languages_idx", "id", @@ -305,7 +308,10 @@ public void testMultipleCombineLimits() { var value = i == limitWithMinimum ? minimum : randomIntBetween(100, 1000); plan = new Limit(EMPTY, L(value), plan); } - assertEquals(new Limit(EMPTY, L(minimum), relation), new LogicalPlanOptimizer().optimize(plan)); + assertEquals( + new Limit(EMPTY, L(minimum), relation), + new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)).optimize(plan) + ); } public static GreaterThan greaterThanOf(Expression left, Expression right) { @@ -889,6 +895,384 @@ public void testCombineOrderByThroughFilter() { as(filter.child(), EsRelation.class); } + /** + * Expected + * TopN[[Order[first_name{f}#170,ASC,LAST]],500[INTEGER]] + * \_MvExpand[first_name{f}#170] + * \_TopN[[Order[emp_no{f}#169,ASC,LAST]],500[INTEGER]] + * \_EsRelation[test][avg_worked_seconds{f}#167, birth_date{f}#168, emp_n..] 
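+     * A note for readers, using made-up data rather than anything asserted below: mv_expand emits
+     * one row per value of a multi-valued field, so a row with first_name = ["Anna", "Bob"] becomes
+     * two rows. The rows ordered by the second sort only exist after the expansion, which is why the
+     * two TopN nodes above cannot be merged into one.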
+ */ + public void testDontCombineOrderByThroughMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | mv_expand first_name + | sort first_name"""); + + var topN = as(plan, TopN.class); + assertThat(orderNames(topN), contains("first_name")); + var mvExpand = as(topN.child(), MvExpand.class); + topN = as(mvExpand.child(), TopN.class); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * Limit[500[INTEGER]] + * \_MvExpand[x{r}#159] + * \_EsqlProject[[first_name{f}#162 AS x]] + * \_Limit[500[INTEGER]] + * \_EsRelation[test][first_name{f}#162] + */ + public void testCopyDefaultLimitPastMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | rename first_name as x + | keep x + | mv_expand x + """); + + var limit = as(plan, Limit.class); + var mvExpand = as(limit.child(), MvExpand.class); + var keep = as(mvExpand.child(), EsqlProject.class); + var limitPastMvExpand = as(keep.child(), Limit.class); + assertThat(limitPastMvExpand.limit(), equalTo(limit.limit())); + as(limitPastMvExpand.child(), EsRelation.class); + } + + /** + * Expected + * Limit[10[INTEGER]] + * \_MvExpand[first_name{f}#155] + * \_EsqlProject[[first_name{f}#155, last_name{f}#156]] + * \_Limit[1[INTEGER]] + * \_EsRelation[test][first_name{f}#155, last_name{f}#156] + */ + public void testDontPushDownLimitPastMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | limit 1 + | keep first_name, last_name + | mv_expand first_name + | limit 10"""); + + var limit = as(plan, Limit.class); + assertThat(limit.limit().fold(), equalTo(10)); + var mvExpand = as(limit.child(), MvExpand.class); + var project = as(mvExpand.child(), EsqlProject.class); + limit = as(project.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(1)); + as(limit.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#141, first_name{f}#142, languages{f}#143, lll{r}#132, salary{f}#147]] + * \_TopN[[Order[salary{f}#147,DESC,FIRST], Order[first_name{f}#142,ASC,LAST]],5[INTEGER]] + * \_Limit[5[INTEGER]] + * \_MvExpand[salary{f}#147] + * \_Eval[[languages{f}#143 + 5[INTEGER] AS lll]] + * \_Filter[languages{f}#143 > 1[INTEGER]] + * \_Limit[10[INTEGER]] + * \_MvExpand[first_name{f}#142] + * \_TopN[[Order[emp_no{f}#141,DESC,FIRST]],10[INTEGER]] + * \_Filter[emp_no{f}#141 < 10006[INTEGER]] + * \_EsRelation[test][emp_no{f}#141, first_name{f}#142, languages{f}#1..] 
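+     * Reading the shape above (a descriptive note, not an extra assertion): each sort+limit pair
+     * collapses into a TopN, and a copy of the nearest limit is kept directly above each mv_expand,
+     * bounding the row growth the expansion introduces; the filters and the eval keep their
+     * relative positions.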
+ */ + public void testMultipleMvExpandWithSortAndLimit() { + LogicalPlan plan = optimizedPlan(""" + from test + | where emp_no <= 10006 + | sort emp_no desc + | mv_expand first_name + | limit 10 + | where languages > 1 + | eval lll = languages + 5 + | mv_expand salary + | limit 5 + | sort first_name + | keep emp_no, first_name, languages, lll, salary + | sort salary desc"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(5)); + assertThat(orderNames(topN), contains("salary", "first_name")); + var limit = as(topN.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(5)); + var mvExp = as(limit.child(), MvExpand.class); + var eval = as(mvExp.child(), Eval.class); + var filter = as(eval.child(), Filter.class); + limit = as(filter.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(10)); + mvExp = as(limit.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(10)); + filter = as(topN.child(), Filter.class); + as(filter.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#350, first_name{f}#351, salary{f}#352]] + * \_TopN[[Order[salary{f}#352,ASC,LAST], Order[first_name{f}#351,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#351] + * \_TopN[[Order[emp_no{f}#350,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#350, first_name{f}#351, salary{f}#352] + */ + public void testPushDownLimitThroughMultipleSort_AfterMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | mv_expand first_name + | keep emp_no, first_name, salary + | sort salary, first_name + | limit 5"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(5)); + assertThat(orderNames(topN), contains("salary", "first_name")); + var mvExp = as(topN.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#361, first_name{f}#362, salary{f}#363]] + * \_TopN[[Order[first_name{f}#362,ASC,LAST]],5[INTEGER]] + * \_TopN[[Order[salary{f}#363,ASC,LAST]],5[INTEGER]] + * \_MvExpand[first_name{f}#362] + * \_TopN[[Order[emp_no{f}#361,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#361, first_name{f}#362, salary{f}#363] + */ + public void testPushDownLimitThroughMultipleSort_AfterMvExpand2() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | mv_expand first_name + | keep emp_no, first_name, salary + | sort salary + | limit 5 + | sort first_name"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(5)); + assertThat(orderNames(topN), contains("first_name")); + topN = as(topN.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(5)); + assertThat(orderNames(topN), contains("salary")); + var mvExp = as(topN.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * Limit[5[INTEGER]] + * \_Aggregate[[first_name{f}#232],[MAX(salary{f}#233) AS max_s, first_name{f}#232]] + * \_Filter[ISNOTNULL(first_name{f}#232)] + * \_MvExpand[first_name{f}#232] + * 
\_TopN[[Order[emp_no{f}#231,ASC,LAST]],50[INTEGER]] + * \_EsRelation[employees][emp_no{f}#231, first_name{f}#232, salary{f}#233] + */ + public void testDontPushDownLimitPastAggregate_AndMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | limit 50 + | mv_expand first_name + | keep emp_no, first_name, salary + | stats max_s = max(salary) by first_name + | where first_name is not null + | limit 5"""); + + var limit = as(plan, Limit.class); + assertThat(limit.limit().fold(), equalTo(5)); + var agg = as(limit.child(), Aggregate.class); + var filter = as(agg.child(), Filter.class); + var mvExp = as(filter.child(), MvExpand.class); + var topN = as(mvExp.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(50)); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * Limit[5[INTEGER]] + * \_Aggregate[[first_name{f}#262],[MAX(salary{f}#263) AS max_s, first_name{f}#262]] + * \_Filter[ISNOTNULL(first_name{f}#262)] + * \_Limit[50[INTEGER]] + * \_MvExpand[first_name{f}#262] + * \_Limit[50[INTEGER]] + * \_EsRelation[employees][emp_no{f}#261, first_name{f}#262, salary{f}#263] + */ + public void testPushDown_TheRightLimit_PastMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | mv_expand first_name + | limit 50 + | keep emp_no, first_name, salary + | stats max_s = max(salary) by first_name + | where first_name is not null + | limit 5"""); + + var limit = as(plan, Limit.class); + assertThat(limit.limit().fold(), equalTo(5)); + var agg = as(limit.child(), Aggregate.class); + var filter = as(agg.child(), Filter.class); + limit = as(filter.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(50)); + var mvExp = as(limit.child(), MvExpand.class); + limit = as(mvExp.child(), Limit.class); + assertThat(limit.limit().fold(), equalTo(50)); + as(limit.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[first_name{f}#11, emp_no{f}#10, salary{f}#12, b{r}#4]] + * \_TopN[[Order[salary{f}#12,ASC,LAST]],5[INTEGER]] + * \_Eval[[100[INTEGER] AS b]] + * \_MvExpand[first_name{f}#11] + * \_TopN[[Order[first_name{f}#11,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#10, first_name{f}#11, salary{f}#12] + */ + public void testPushDownLimit_PastEvalAndMvExpand() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort first_name + | mv_expand first_name + | eval b = 100 + | sort salary + | limit 5 + | keep first_name, emp_no, salary, b"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(5)); + assertThat(orderNames(topN), contains("salary")); + var eval = as(topN.child(), Eval.class); + var mvExp = as(eval.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("first_name")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] + * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#215 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#105)] + * \_MvExpand[first_name{f}#105] + * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] + */ + public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedField() { + LogicalPlan plan = optimizedPlan(""" + from test + | 
sort emp_no + | mv_expand first_name + | where gender == "F" + | where first_name LIKE "R*" + | keep emp_no, first_name, salary + | sort salary, first_name + | limit 15"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(15)); + assertThat(orderNames(topN), contains("salary", "first_name")); + var filter = as(topN.child(), Filter.class); + assertThat(filter.condition(), instanceOf(And.class)); + var mvExp = as(filter.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + // the filter acts on first_name (the one used in mv_expand), so the limit 15 is not pushed down past mv_expand + // instead the default limit is added + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#104, first_name{f}#105, salary{f}#106]] + * \_TopN[[Order[salary{f}#106,ASC,LAST], Order[first_name{f}#105,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#215 == [46][KEYWORD] AND salary{f}#106 > 60000[INTEGER]] + * \_MvExpand[first_name{f}#105] + * \_TopN[[Order[emp_no{f}#104,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#104, first_name{f}#105, salary{f}#106] + */ + public void testAddDefaultLimit_BeforeMvExpand_WithFilter_NOT_OnExpandedField() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort emp_no + | mv_expand first_name + | where gender == "F" + | where salary > 60000 + | keep emp_no, first_name, salary + | sort salary, first_name + | limit 15"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(15)); + assertThat(orderNames(topN), contains("salary", "first_name")); + var filter = as(topN.child(), Filter.class); + assertThat(filter.condition(), instanceOf(And.class)); + var mvExp = as(filter.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + // even though the filters after mv_expand do not act on the expanded field values, the limit 15 is not pushed down past mv_expand + // the default limit (10000) is added before mv_expand instead, just as in the test above + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("emp_no")); + as(topN.child(), EsRelation.class); + } + + /** + * Expected + * EsqlProject[[emp_no{f}#116, first_name{f}#117 AS x, salary{f}#119]] + * \_TopN[[Order[salary{f}#119,ASC,LAST], Order[first_name{f}#117,ASC,LAST]],15[INTEGER]] + * \_Filter[gender{f}#118 == [46][KEYWORD] AND WILDCARDLIKE(first_name{f}#117)] + * \_MvExpand[first_name{f}#117] + * \_TopN[[Order[gender{f}#118,ASC,LAST]],10000[INTEGER]] + * \_EsRelation[employees][emp_no{f}#116, first_name{f}#117, gender{f}#118, sa..
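+     * The rename does not change the outcome: first_name AS x is resolved away, so the filter on x
+     * is still a filter on the expanded field, and the default limit (10000), not the query's 15,
+     * ends up before mv_expand, mirroring the un-aliased variant above.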
+ */ + public void testAddDefaultLimit_BeforeMvExpand_WithFilterOnExpandedFieldAlias() { + LogicalPlan plan = optimizedPlan(""" + from test + | sort gender + | mv_expand first_name + | rename first_name AS x + | where gender == "F" + | where x LIKE "A*" + | keep emp_no, x, salary + | sort salary, x + | limit 15"""); + + var keep = as(plan, EsqlProject.class); + var topN = as(keep.child(), TopN.class); + assertThat(topN.limit().fold(), equalTo(15)); + assertThat(orderNames(topN), contains("salary", "first_name")); + var filter = as(topN.child(), Filter.class); + assertThat(filter.condition(), instanceOf(And.class)); + var mvExp = as(filter.child(), MvExpand.class); + topN = as(mvExp.child(), TopN.class); + // the filter uses an alias ("x") to the expanded field ("first_name"), so the default limit is used and not the one provided + assertThat(topN.limit().fold(), equalTo(10000)); + assertThat(orderNames(topN), contains("gender")); + as(topN.child(), EsRelation.class); + } + private static List orderNames(TopN topN) { return topN.order().stream().map(o -> as(o.child(), NamedExpression.class).name()).toList(); } @@ -1923,6 +2307,171 @@ public void testPruneRenameOnAggBy() { var source = as(agg.child(), EsRelation.class); } + /** + * Expects + * Project[[c1{r}#2, c2{r}#4, cs{r}#6, cm{r}#8, cexp{r}#10]] + * \_Eval[[c1{r}#2 AS c2, c1{r}#2 AS cs, c1{r}#2 AS cm, c1{r}#2 AS cexp]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[],[COUNT([2a][KEYWORD]) AS c1]] + * \_EsRelation[test][_meta_field{f}#17, emp_no{f}#11, first_name{f}#12, ..] + */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100634") + public void testEliminateDuplicateAggsCountAll() { + var plan = plan(""" + from test + | stats c1 = count(1), c2 = count(2), cs = count(*), cm = count(), cexp = count("123") + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("c1", "c2", "cs", "cm", "cexp")); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.names(fields), contains("c2", "cs", "cm", "cexp")); + for (Alias field : fields) { + assertThat(Expressions.name(field.child()), is("c1")); + } + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + assertThat(Expressions.names(aggs), contains("c1")); + aggFieldName(aggs.get(0), Count.class, "*"); + var source = as(agg.child(), EsRelation.class); + } + + /** + * Expects + * Project[[c1{r}#7, cx{r}#10, cs{r}#12, cy{r}#15]] + * \_Eval[[c1{r}#7 AS cx, c1{r}#7 AS cs, c1{r}#7 AS cy]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[],[COUNT([2a][KEYWORD]) AS c1]] + * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] 
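+     * The intent, once the linked issue is fixed: count(1), count(2), count(*), count() and
+     * count("123") are all row counts over constant, never-null arguments, so a single COUNT should
+     * survive, with the duplicate columns rebuilt as aliases of c1 in the Eval above.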
+ */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100634") + public void testEliminateDuplicateAggsWithAliasedFields() { + var plan = plan(""" + from test + | eval x = 1 + | eval y = x + | stats c1 = count(1), cx = count(x), cs = count(*), cy = count(y) + """); + + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("c1", "cx", "cs", "cy")); + var eval = as(project.child(), Eval.class); + var fields = eval.fields(); + assertThat(Expressions.names(fields), contains("cx", "cs", "cy")); + for (Alias field : fields) { + assertThat(Expressions.name(field.child()), is("c1")); + } + var limit = as(eval.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + assertThat(Expressions.names(aggs), contains("c1")); + aggFieldName(aggs.get(0), Count.class, "*"); + var source = as(agg.child(), EsRelation.class); + } + + /** + * Expects + * Project[[min{r}#1385, max{r}#1388, min{r}#1385 AS min2, max{r}#1388 AS max2, gender{f}#1398]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[gender{f}#1398],[MIN(salary{f}#1401) AS min, MAX(salary{f}#1401) AS max, gender{f}#1398]] + * \_EsRelation[test][_meta_field{f}#1402, emp_no{f}#1396, first_name{f}#..] + */ + public void testEliminateDuplicateAggsMixed() { + var plan = plan(""" + from test + | stats min = min(salary), max = max(salary), min2 = min(salary), max2 = max(salary) by gender + """); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(Expressions.names(projections), contains("min", "max", "min2", "max2", "gender")); + as(projections.get(0), ReferenceAttribute.class); + as(projections.get(1), ReferenceAttribute.class); + assertThat(Expressions.name(aliased(projections.get(2), ReferenceAttribute.class)), is("min")); + assertThat(Expressions.name(aliased(projections.get(3), ReferenceAttribute.class)), is("max")); + + var limit = as(project.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + assertThat(Expressions.names(aggs), contains("min", "max", "gender")); + aggFieldName(aggs.get(0), Min.class, "salary"); + aggFieldName(aggs.get(1), Max.class, "salary"); + var source = as(agg.child(), EsRelation.class); + } + + /** + * Expects + * EsqlProject[[a{r}#5, c{r}#8]] + * \_Eval[[null[INTEGER] AS x]] + * \_EsRelation[test][_meta_field{f}#15, emp_no{f}#9, first_name{f}#10, g..] + */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100634") + public void testEliminateDuplicateAggWithNull() { + var plan = plan(""" + from test + | eval x = null + 1 + | stats a = avg(x), c = count(x) + """); + fail("Awaits fix"); + } + + /** + * Expects + * Project[[max(x){r}#11, max(x){r}#11 AS max(y), max(x){r}#11 AS max(z)]] + * \_Limit[500[INTEGER]] + * \_Aggregate[[],[MAX(salary{f}#21) AS max(x)]] + * \_EsRelation[test][_meta_field{f}#22, emp_no{f}#16, first_name{f}#17, ..] 
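+     * The same elimination in its non-COUNT flavor: x, y and z are all transitive aliases of
+     * salary, so the three MAX calls fold into a single MAX(salary), with max(y) and max(z)
+     * projected as aliases of max(x).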
+ */ + public void testEliminateDuplicateAggsNonCount() { + var plan = plan(""" + from test + | eval x = salary + | eval y = x + | eval z = y + | stats max(x), max(y), max(z) + """); + + var project = as(plan, Project.class); + var projections = project.projections(); + assertThat(Expressions.names(projections), contains("max(x)", "max(y)", "max(z)")); + as(projections.get(0), ReferenceAttribute.class); + assertThat(Expressions.name(aliased(projections.get(1), ReferenceAttribute.class)), is("max(x)")); + assertThat(Expressions.name(aliased(projections.get(2), ReferenceAttribute.class)), is("max(x)")); + + var limit = as(project.child(), Limit.class); + var agg = as(limit.child(), Aggregate.class); + var aggs = agg.aggregates(); + assertThat(Expressions.names(aggs), contains("max(x)")); + aggFieldName(aggs.get(0), Max.class, "salary"); + var source = as(agg.child(), EsRelation.class); + } + + /** + * Expected + * Limit[2[INTEGER]] + * \_Filter[a{r}#6 > 2[INTEGER]] + * \_MvExpand[a{r}#2,a{r}#6] + * \_Row[[[1, 2, 3][INTEGER] AS a]] + */ + public void testMvExpandFoldable() { + LogicalPlan plan = optimizedPlan(""" + row a = [1, 2, 3] + | mv_expand a + | where a > 2 + | limit 2"""); + + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var expand = as(filter.child(), MvExpand.class); + assertThat(filter.condition(), instanceOf(GreaterThan.class)); + var filterProp = ((GreaterThan) filter.condition()).left(); + assertTrue(expand.expanded().semanticEquals(filterProp)); + assertFalse(expand.target().semanticEquals(filterProp)); + var row = as(expand.child(), Row.class); + } + private T aliased(Expression exp, Class clazz) { var alias = as(exp, Alias.class); return as(alias.child(), clazz); @@ -1932,7 +2481,8 @@ private void aggFieldName(Expression exp, Class var alias = as(exp, Alias.class); var af = as(alias.child(), aggType); var field = af.field(); - assertThat(Expressions.name(field), is(fieldName)); + var name = field.foldable() ? 
BytesRefs.toString(field.fold()) : Expressions.name(field); + assertThat(name, is(fieldName)); } private LogicalPlan optimizedPlan(String query) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index f0c3b9d541b16..1f2bde2526fab 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -158,7 +158,7 @@ public void init() { .sum(); EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(); + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(config)); FunctionRegistry functionRegistry = new EsqlFunctionRegistry(); mapper = new Mapper(functionRegistry); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 5047688610120..985127731a0d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -613,12 +613,14 @@ public void testDissectPattern() { assertEquals("", dissect.parser().appendSeparator()); assertEquals(List.of(referenceAttribute("foo", KEYWORD)), dissect.extractedFields()); - cmd = processingCommand("dissect a \"%{foo}\" append_separator=\",\""); - assertEquals(Dissect.class, cmd.getClass()); - dissect = (Dissect) cmd; - assertEquals("%{foo}", dissect.parser().pattern()); - assertEquals(",", dissect.parser().appendSeparator()); - assertEquals(List.of(referenceAttribute("foo", KEYWORD)), dissect.extractedFields()); + for (String separatorName : List.of("append_separator", "APPEND_SEPARATOR", "AppEnd_SeparAtor")) { + cmd = processingCommand("dissect a \"%{foo}\" " + separatorName + "=\",\""); + assertEquals(Dissect.class, cmd.getClass()); + dissect = (Dissect) cmd; + assertEquals("%{foo}", dissect.parser().pattern()); + assertEquals(",", dissect.parser().appendSeparator()); + assertEquals(List.of(referenceAttribute("foo", KEYWORD)), dissect.extractedFields()); + } for (Tuple queryWithUnexpectedCmd : List.of( Tuple.tuple("from a | dissect foo \"\"", "[]"), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java index db3d2419ee2ee..774ac24d3cd02 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/FilterTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; import 
org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; @@ -70,7 +71,7 @@ public static void init() { mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - logicalOptimizer = new LogicalPlanOptimizer(); + logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(EsqlTestUtils.TEST_CFG)); physicalPlanOptimizer = new PhysicalPlanOptimizer(new PhysicalOptimizerContext(EsqlTestUtils.TEST_CFG)); mapper = new Mapper(false); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java index cceb3a2ab835b..8970617548016 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plugin/DataNodeRequestTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalPlanOptimizer; @@ -165,7 +166,7 @@ static LogicalPlan parse(String query) { Map mapping = loadMapping("mapping-basic.json"); EsIndex test = new EsIndex("test", mapping); IndexResolution getIndexResult = IndexResolution.valid(test); - var logicalOptimizer = new LogicalPlanOptimizer(); + var logicalOptimizer = new LogicalPlanOptimizer(new LogicalOptimizerContext(TEST_CFG)); var analyzer = new Analyzer( new AnalyzerContext(EsqlTestUtils.TEST_CFG, new EsqlFunctionRegistry(), getIndexResult, emptyPolicyResolution()), TEST_VERIFIER diff --git a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java index 21f0e846ebfa6..fb8d68541d4d1 100644 --- a/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java +++ b/x-pack/plugin/fleet/src/main/java/org/elasticsearch/xpack/fleet/Fleet.java @@ -23,30 +23,19 @@ import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.indices.ExecutorNames; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemDataStreamDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.SystemIndexDescriptor.Type; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import 
org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; @@ -100,28 +89,13 @@ public class Fleet extends Plugin implements SystemIndexPlugin { private static final int FLEET_ACTIONS_RESULTS_MAPPINGS_VERSION = 1; @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { FleetTemplateRegistry registry = new FleetTemplateRegistry( - environment.settings(), - clusterService, - threadPool, - client, - xContentRegistry + services.environment().settings(), + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() ); registry.initialize(); return List.of(); diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java index f366a18c7393f..0b1693e0c3712 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java @@ -89,9 +89,7 @@ public void testTimestampRangeRecalculatedOnStalePrimaryAllocation() throws IOEx assertThat(client().prepareDelete("index", indexResponse.getId()).get().status(), equalTo(RestStatus.OK)); - assertAcked( - client().execute(FreezeIndexAction.INSTANCE, new FreezeRequest("index").waitForActiveShards(ActiveShardCount.ONE)).actionGet() - ); + assertAcked(client().execute(FreezeIndexAction.INSTANCE, new FreezeRequest("index").waitForActiveShards(ActiveShardCount.ONE))); assertThat( clusterAdmin().prepareState().get().getState().metadata().index("index").getTimestampRange(), @@ -234,8 +232,7 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(OpenPointInTimeAction.INSTANCE, openPointInTimeRequest).actionGet().getPointInTimeId(); try { - SearchResponse resp = client().prepareSearch() - .setIndices(indexName) + SearchResponse resp = prepareSearch().setIndices(indexName) .setPreference(null) .setPointInTime(new PointInTimeBuilder(pitId)) .get(); @@ -244,8 +241,7 @@ public void testRetryPointInTime() throws Exception { assertHitCount(resp, numDocs); internalCluster().restartNode(assignedNode); ensureGreen(indexName); - resp = client().prepareSearch() - .setIndices(indexName) + resp = prepareSearch().setIndices(indexName) .setQuery(new 
RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference(null) @@ -287,8 +283,7 @@ public void testPointInTimeWithDeletedIndices() { try { indicesAdmin().prepareDelete("index-1").get(); // Return partial results if allow partial search result is allowed - SearchResponse resp = client().prepareSearch() - .setPreference(null) + SearchResponse resp = prepareSearch().setPreference(null) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)) .get(); @@ -297,10 +292,7 @@ public void testPointInTimeWithDeletedIndices() { // Fails if allow partial search result is not allowed expectThrows( ElasticsearchException.class, - client().prepareSearch() - .setPreference(null) - .setAllowPartialSearchResults(false) - .setPointInTime(new PointInTimeBuilder(pitId))::get + prepareSearch().setPreference(null).setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pitId))::get ); } finally { client().execute(ClosePointInTimeAction.INSTANCE, new ClosePointInTimeRequest(pitId)).actionGet(); @@ -323,7 +315,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(OpenPointInTimeAction.INSTANCE, openPointInTimeRequest).actionGet().getPointInTimeId(); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, numDocs); } finally { @@ -337,7 +329,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { ); final String pitId = client().execute(OpenPointInTimeAction.INSTANCE, openPointInTimeRequest).actionGet().getPointInTimeId(); try { - SearchResponse resp = client().prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); + SearchResponse resp = prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)).get(); assertNoFailures(resp); assertHitCount(resp, 0); } finally { diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java index a4ec5222ffcc2..6b028e5ea0815 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java @@ -287,7 +287,7 @@ public void testUnfreezeClosedIndices() { client().execute( FreezeIndexAction.INSTANCE, new FreezeRequest("idx*").setFreeze(false).indicesOptions(IndicesOptions.strictExpand()) - ).actionGet() + ) ); ClusterStateResponse stateResponse = clusterAdmin().prepareState().get(); assertEquals(IndexMetadata.State.CLOSE, stateResponse.getState().getMetadata().index("idx-closed").getState()); @@ -479,7 +479,7 @@ public void testIgnoreUnavailable() { new FreezeRequest("idx*", "not_available").indicesOptions( IndicesOptions.fromParameters(null, "true", null, null, IndicesOptions.strictExpandOpen()) ) - ).actionGet() + ) ); assertIndexFrozen("idx"); assertEquals(IndexMetadata.State.CLOSE, clusterAdmin().prepareState().get().getState().metadata().index("idx-close").getState()); diff --git 
a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java index 943394f326653..c4c91dac9a513 100644 --- a/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java +++ b/x-pack/plugin/identity-provider/src/main/java/org/elasticsearch/xpack/idp/IdentityProviderPlugin.java @@ -11,31 +11,18 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.ssl.X509KeyPairSettings; @@ -86,35 +73,24 @@ public class IdentityProviderPlugin extends Plugin implements ActionPlugin { private Settings settings; @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - settings = environment.settings(); + public Collection createComponents(PluginServices services) { + settings = services.environment().settings(); enabled = ENABLED_SETTING.get(settings); if (enabled == false) { return List.of(); } SamlInit.initialize(); - final SamlServiceProviderIndex index = new SamlServiceProviderIndex(client, clusterService); - final SecurityContext securityContext = new SecurityContext(settings, threadPool.getThreadContext()); + final SamlServiceProviderIndex index = new SamlServiceProviderIndex(services.client(), services.clusterService()); + final SecurityContext securityContext = new SecurityContext(settings, services.threadPool().getThreadContext()); final ServiceProviderDefaults 
serviceProviderDefaults = ServiceProviderDefaults.forSettings(settings); - final ApplicationActionsResolver actionsResolver = new ApplicationActionsResolver(settings, serviceProviderDefaults, client); - final UserPrivilegeResolver userPrivilegeResolver = new UserPrivilegeResolver(client, securityContext, actionsResolver); + final ApplicationActionsResolver actionsResolver = new ApplicationActionsResolver( + settings, + serviceProviderDefaults, + services.client() + ); + final UserPrivilegeResolver userPrivilegeResolver = new UserPrivilegeResolver(services.client(), securityContext, actionsResolver); final SamlServiceProviderFactory serviceProviderFactory = new SamlServiceProviderFactory(serviceProviderDefaults); final SamlServiceProviderResolver registeredServiceProviderResolver = new SamlServiceProviderResolver( @@ -123,13 +99,13 @@ public Collection createComponents( serviceProviderFactory ); final WildcardServiceProviderResolver wildcardServiceProviderResolver = WildcardServiceProviderResolver.create( - environment, - resourceWatcherService, - scriptService, + services.environment(), + services.resourceWatcherService(), + services.scriptService(), serviceProviderFactory ); final SamlIdentityProvider idp = SamlIdentityProvider.builder(registeredServiceProviderResolver, wildcardServiceProviderResolver) - .fromSettings(environment) + .fromSettings(services.environment()) .serviceProviderDefaults(serviceProviderDefaults) .build(); diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle index a1a2a5129e3f7..111496669afe3 100644 --- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -16,12 +17,12 @@ tasks.register('leader-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.target_cluster', 'leader' /* To support taking index snapshots, we have to set path.repo setting */ - systemProperty 'tests.path.repo', repoDir.absolutePath + nonInputProperties.systemProperty 'tests.path.repo', repoDir.absolutePath } testClusters.matching { it.name == 'leader-cluster' }.configureEach { testDistribution = 'DEFAULT' - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.ccr.enabled', 'true' setting 'xpack.security.enabled', 'false' setting 'xpack.watcher.enabled', 'false' @@ -39,12 +40,12 @@ tasks.register('follow-cluster', RestIntegTestTask) { nonInputProperties.systemProperty 'tests.leader_remote_cluster_seed', "${-> testClusters.'leader-cluster'.getAllTransportPortURI().get(0)}" /* To support taking index snapshots, we have to set path.repo setting */ - systemProperty 'tests.path.repo', repoDir.absolutePath + nonInputProperties.systemProperty 'tests.path.repo', repoDir.absolutePath } testClusters.matching{ it.name == 'follow-cluster' }.configureEach { testDistribution = 'DEFAULT' - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.ccr.enabled', 'true' setting 'xpack.security.enabled', 'false' setting 'xpack.watcher.enabled', 'false' @@ -52,11 +53,11 @@ testClusters.matching{ it.name == 'follow-cluster' }.configureEach { setting 
'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.poll_interval', '1000ms' setting 'cluster.remote.leader_cluster.seeds', - { "\"${testClusters.'leader-cluster'.getAllTransportPortURI().get(0)}\"" } + { "\"${testClusters.'leader-cluster'.getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE } tasks.named("check").configure { dependsOn 'follow-cluster' } // Security is explicitly disabled for follow-cluster and leader-cluster, do not run these in FIPS mode tasks.withType(Test).configureEach { enabled = BuildParams.inFipsJvm == false -} \ No newline at end of file +} diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index 523bf5d04ae4f..ef20d57d83f59 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' @@ -12,14 +13,14 @@ File repoDir = file("$buildDir/testclusters/repo") tasks.named("javaRestTest").configure { /* To support taking index snapshots, we have to set path.repo setting */ - systemProperty 'tests.path.repo', repoDir + nonInputProperties.systemProperty 'tests.path.repo', repoDir } testClusters.configureEach { testDistribution = 'DEFAULT' numberOfNodes = 4 - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' setting 'xpack.security.enabled', 'false' diff --git a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/UpdateSettingsStepTests.java b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/UpdateSettingsStepTests.java index 06ac4673264f3..4e8df685a25a7 100644 --- a/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/UpdateSettingsStepTests.java +++ b/x-pack/plugin/ilm/src/internalClusterTest/java/org/elasticsearch/xpack/ilm/UpdateSettingsStepTests.java @@ -7,30 +7,18 @@ package org.elasticsearch.xpack.ilm; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import 
org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ilm.Step.StepKey; import org.elasticsearch.xpack.core.ilm.UpdateSettingsStep; import org.junit.After; @@ -39,7 +27,6 @@ import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.function.Supplier; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xpack.ilm.UpdateSettingsStepTests.SettingsTestingService.INVALID_VALUE; @@ -62,22 +49,7 @@ public void onIndexModule(IndexModule module) { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { return List.of(service); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index 6409c2b72e1f1..53e4d3de463fd 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -10,37 +10,25 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import 
org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.cluster.action.MigrateToDataTiersAction; @@ -154,48 +142,39 @@ protected XPackLicenseState getLicenseState() { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { final List components = new ArrayList<>(); ILMHistoryTemplateRegistry ilmTemplateRegistry = new ILMHistoryTemplateRegistry( settings, - clusterService, - threadPool, - client, - xContentRegistry + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() ); ilmTemplateRegistry.initialize(); - ilmHistoryStore.set(new ILMHistoryStore(new OriginSettingClient(client, INDEX_LIFECYCLE_ORIGIN), clusterService, threadPool)); + ilmHistoryStore.set( + new ILMHistoryStore( + new OriginSettingClient(services.client(), INDEX_LIFECYCLE_ORIGIN), + services.clusterService(), + services.threadPool() + ) + ); /* * Here we use threadPool::absoluteTimeInMillis rather than System::currentTimeMillis because snapshot start time is set using * ThreadPool.absoluteTimeInMillis(). ThreadPool.absoluteTimeInMillis() returns a cached time that can be several hundred * milliseconds behind System.currentTimeMillis(). The result is that a snapshot taken after a policy is created can have a start * time that is before the policy's (or action's) start time if System::currentTimeMillis is used here.
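         * For example, with made-up numbers: if the cached thread pool clock reads 1000 while
         * System.currentTimeMillis() reads 1150 at the moment a policy is created, a snapshot taken
         * right afterwards would record start time 1000 and appear to predate the policy.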
*/ - LongSupplier nowSupplier = threadPool::absoluteTimeInMillis; + LongSupplier nowSupplier = services.threadPool()::absoluteTimeInMillis; indexLifecycleInitialisationService.set( new IndexLifecycleService( settings, - client, - clusterService, - threadPool, + services.client(), + services.clusterService(), + services.threadPool(), getClock(), nowSupplier, - xContentRegistry, + services.xContentRegistry(), ilmHistoryStore.get(), getLicenseState() ) @@ -204,15 +183,17 @@ public Collection createComponents( ilmHealthIndicatorService.set( new IlmHealthIndicatorService( - clusterService, + services.clusterService(), new IlmHealthIndicatorService.StagnatingIndicesFinder( - clusterService, + services.clusterService(), IlmHealthIndicatorService.RULES_BY_ACTION_CONFIG.values(), System::currentTimeMillis ) ) ); - reservedLifecycleAction.set(new ReservedLifecycleAction(xContentRegistry, client, XPackPlugin.getSharedLicenseState())); + reservedLifecycleAction.set( + new ReservedLifecycleAction(services.xContentRegistry(), services.client(), XPackPlugin.getSharedLicenseState()) + ); return components; } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index f38f90f773b7f..2f0f95cf8a911 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -10,11 +10,8 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; @@ -23,24 +20,15 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.InferenceServicePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.inference.action.DeleteInferenceModelAction; import org.elasticsearch.xpack.inference.action.GetInferenceModelAction; @@ -108,25 +96,12 @@ public List getRestHandlers( } @Override - public 
Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - httpManager.set(HttpClientManager.create(settings, threadPool, clusterService)); - httpRequestSenderFactory.set(new HttpRequestSenderFactory(threadPool, httpManager.get(), clusterService, settings)); - ModelRegistry modelRegistry = new ModelRegistry(client); + public Collection createComponents(PluginServices services) { + httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService())); + httpRequestSenderFactory.set( + new HttpRequestSenderFactory(services.threadPool(), httpManager.get(), services.clusterService(), settings) + ); + ModelRegistry modelRegistry = new ModelRegistry(services.client()); return List.of(modelRegistry); } diff --git a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java index 4d5b572583bd1..bcac3f12fd131 100644 --- a/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java +++ b/x-pack/plugin/mapper-aggregate-metric/src/main/java/org/elasticsearch/xpack/aggregatemetric/aggregations/support/AggregateMetricsValuesSourceType.java @@ -11,7 +11,6 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationErrors; import org.elasticsearch.search.aggregations.UnsupportedAggregationOnDownsampledIndex; -import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -19,6 +18,7 @@ import org.elasticsearch.xpack.aggregatemetric.fielddata.IndexAggregateDoubleMetricFieldData; import java.util.Locale; +import java.util.function.LongSupplier; public enum AggregateMetricsValuesSourceType implements ValuesSourceType { @@ -60,7 +60,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { throw new IllegalArgumentException("Can't apply missing values on a " + valuesSource.getClass()); } diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 9723276c827fc..6c8462c9e4948 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -30,6 +30,8 @@ import 
org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.ConstantIndexFieldData; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.ConstantFieldType; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; @@ -133,6 +135,49 @@ public String familyTypeName() { return KeywordFieldMapper.CONTENT_TYPE; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + // TODO build a constant block directly + if (value == null) { + return BlockDocValuesReader.nulls(); + } + BytesRef bytes = new BytesRef(value); + return context -> new BlockDocValuesReader() { + private int docId; + + @Override + public int docID() { + return docId; + } + + @Override + public BlockLoader.BytesRefBuilder builder(BlockLoader.BuilderFactory factory, int expectedCount) { + return factory.bytesRefs(expectedCount); + } + + @Override + public BlockLoader.Block readValues(BlockLoader.BuilderFactory factory, BlockLoader.Docs docs) { + try (BlockLoader.BytesRefBuilder builder = builder(factory, docs.count())) { + for (int i = 0; i < docs.count(); i++) { + builder.appendBytesRef(bytes); + } + return builder.build(); + } + } + + @Override + public void readValuesFromSingleDoc(int docId, BlockLoader.Builder builder) { + this.docId = docId; + ((BlockLoader.BytesRefBuilder) builder).appendBytesRef(bytes); + } + + @Override + public String toString() { + return "ConstantKeyword"; + } + }; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new ConstantIndexFieldData.Builder( diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index 859168e154ff8..aaa28e28b72c9 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -7,11 +7,18 @@ package org.elasticsearch.xpack.constantkeyword.mapper; +import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.LuceneDocument; @@ -21,7 +28,9 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.TestBlock; import org.elasticsearch.plugins.Plugin; +import 
org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin; import org.elasticsearch.xpack.constantkeyword.mapper.ConstantKeywordFieldMapper.ConstantKeywordFieldType; @@ -30,10 +39,13 @@ import java.io.IOException; import java.util.Collection; import java.util.List; +import java.util.Set; +import java.util.function.Function; import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPING_TOTAL_FIELDS_LIMIT_SETTING; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.nullValue; public class ConstantKeywordFieldMapperTests extends MapperTestCase { @@ -212,6 +224,62 @@ protected boolean allowsNullValues() { return false; // null is an error for constant keyword } + /** + * Test loading blocks when there is no defined value. This is allowed + * for newly created indices that haven't received any documents that + * contain the field. + */ + public void testNullValueBlockLoaderReadValues() throws IOException { + testNullBlockLoader(blockReader -> (TestBlock) blockReader.readValues(TestBlock.FACTORY, TestBlock.docs(0))); + } + + /** + * Test loading blocks when there is no defined value. This is allowed + * for newly created indices that haven't received any documents that + * contain the field. + */ + public void testNullValueBlockLoaderReadValuesFromSingleDoc() throws IOException { + testNullBlockLoader(blockReader -> { + TestBlock block = (TestBlock) blockReader.builder(TestBlock.FACTORY, 1); + blockReader.readValuesFromSingleDoc(0, block); + return block; + }); + } + + private void testNullBlockLoader(CheckedFunction body) throws IOException { + MapperService mapper = createMapperService(syntheticSourceMapping(b -> { + b.startObject("field"); + b.field("type", "constant_keyword"); + b.endObject(); + })); + BlockLoader loader = mapper.fieldType("field").blockLoader(new MappedFieldType.BlockLoaderContext() { + @Override + public String indexName() { + throw new UnsupportedOperationException(); + } + + @Override + public SearchLookup lookup() { + throw new UnsupportedOperationException(); + } + + @Override + public Set sourcePaths(String name) { + return mapper.mappingLookup().sourcePaths(name); + } + }); + try (Directory directory = newDirectory()) { + RandomIndexWriter iw = new RandomIndexWriter(random(), directory); + LuceneDocument doc = mapper.documentMapper().parse(source(b -> {})).rootDoc(); + iw.addDocument(doc); + iw.close(); + try (DirectoryReader reader = DirectoryReader.open(directory)) { + TestBlock block = body.apply(loader.reader(reader.leaves().get(0))); + assertThat(block.get(0), nullValue()); + } + } + } + @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { assertFalse("constant_keyword doesn't support ignore_malformed", ignoreMalformed); @@ -237,6 +305,11 @@ protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } + @Override + protected Function loadBlockExpected() { + return v -> ((BytesRef) v).utf8ToString(); + } + public void testNullValueSyntheticSource() throws IOException { DocumentMapper mapper = createDocumentMapper(syntheticSourceMapping(b -> { b.startObject("field"); diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java 
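Note on the constant_keyword change above: because the field holds at most one value per index, its block loader never needs doc values or _source; it either emits nulls (no value defined yet) or repeats the single configured value for every requested document. A minimal standalone sketch of that idea, using simplified stand-in types rather than the real BlockLoader/BlockDocValuesReader interfaces:

import java.util.Arrays;

// Sketch types only; the real interfaces live in org.elasticsearch.index.mapper.
interface ValueReader {
    String[] read(int docCount);
}

final class ConstantValueReader implements ValueReader {
    private final String value; // null until the index has seen a value for the field

    ConstantValueReader(String value) {
        this.value = value;
    }

    @Override
    public String[] read(int docCount) {
        // Every document carries the same value, so the "block" is just the
        // configured constant repeated docCount times (all nulls when unset).
        String[] block = new String[docCount];
        Arrays.fill(block, value);
        return block;
    }
}

As the TODO in the diff notes, the real implementation could eventually hand back a dedicated constant block instead of filling a builder one document at a time.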
b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index ca23799300f24..62b02f5a3d850 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -24,6 +24,9 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; +import org.elasticsearch.index.mapper.BlockSourceReader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -313,6 +316,26 @@ public Query rangeQuery( return query; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (indexMode == IndexMode.TIME_SERIES && metricType == TimeSeriesParams.MetricType.COUNTER) { + // Counters are not supported by ESQL, so we load them as null + return BlockDocValuesReader.nulls(); + } + if (hasDocValues()) { + return BlockDocValuesReader.longs(name()); + } + return BlockSourceReader.longs(new SourceValueFetcher(blContext.sourcePaths(name()), nullValueFormatted) { + @Override + protected Object parseSourceValue(Object value) { + if (value.equals("")) { + return nullValueFormatted; + } + return parseUnsignedLong(value); + } + }); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { FielddataOperation operation = fieldDataContext.fielddataOperation(); diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java index 54a1edd88eb6b..95fe8f0a530ba 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapperTests.java @@ -30,6 +30,7 @@ import java.math.BigInteger; import java.util.Collection; import java.util.List; +import java.util.function.Function; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; @@ -365,6 +366,20 @@ protected IngestScriptSupport ingestScriptSupport() { throw new AssumptionViolatedException("not supported"); } + @Override + protected Function<Object, Object> loadBlockExpected() { + return v -> { + // Numbers are in the block as a long but the test needs to compare them to their BigInteger value parsed from xcontent. + if (v instanceof BigInteger ul) { + if (ul.bitLength() < Long.SIZE) { + return ul.longValue() ^ Long.MIN_VALUE; + } + return ul.subtract(BigInteger.ONE.shiftLeft(Long.SIZE - 1)).longValue(); + } + return ((Long) v).longValue() ^ Long.MIN_VALUE; + }; + } + final class NumberSyntheticSourceSupport implements SyntheticSourceSupport { private final BigInteger nullValue = usually() ?
null : BigInteger.valueOf(randomNonNegativeLong()); diff --git a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java index 3594dde42229b..0b0af8f8f9acf 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java +++ b/x-pack/plugin/mapper-unsigned-long/src/test/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongTests.java @@ -41,7 +41,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.range; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -99,12 +99,11 @@ public void testSort() { for (String index : new String[] { "idx", "idx-sort" }) { // asc sort { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, numDocs); int i = 0; @@ -114,12 +113,11 @@ public void testSort() { } // desc sort { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.DESC) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, numDocs); int i = numDocs - 1; @@ -129,13 +127,12 @@ public void testSort() { } // asc sort with search_after as Long { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .searchAfter(new Long[] { 100L }) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, 7); int i = 3; @@ -145,13 +142,12 @@ public void testSort() { } // asc sort with search_after as BigInteger { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .searchAfter(new BigInteger[] { new BigInteger("18446744073709551614") }) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, 2); int i = 8; @@ -161,13 +157,12 @@ public void testSort() { } // asc sort with search_after as BigInteger in String format { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = 
prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .searchAfter(new String[] { "18446744073709551614" }) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, 2); int i = 8; @@ -177,8 +172,7 @@ public void testSort() { } // asc sort with search_after of negative value should fail { - SearchRequestBuilder srb = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchRequestBuilder srb = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .searchAfter(new Long[] { -1L }); @@ -187,8 +181,7 @@ public void testSort() { } // asc sort with search_after of value>=2^64 should fail { - SearchRequestBuilder srb = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchRequestBuilder srb = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.ASC) .searchAfter(new BigInteger[] { new BigInteger("18446744073709551616") }); @@ -197,13 +190,12 @@ public void testSort() { } // desc sort with search_after as BigInteger { - SearchResponse response = client().prepareSearch(index) - .setQuery(QueryBuilders.matchAllQuery()) + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()) .setSize(numDocs) .addSort("ul_field", SortOrder.DESC) .searchAfter(new BigInteger[] { new BigInteger("18446744073709551615") }) .get(); - assertSearchResponse(response); + assertNoFailures(response); SearchHit[] hits = response.getHits().getHits(); assertEquals(hits.length, 8); int i = 7; @@ -217,8 +209,8 @@ public void testSort() { public void testAggs() { // terms agg { - SearchResponse response = client().prepareSearch("idx").setSize(0).addAggregation(terms("ul_terms").field("ul_field")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(terms("ul_terms").field("ul_field")).get(); + assertNoFailures(response); Terms terms = response.getAggregations().get("ul_terms"); long[] expectedBucketDocCounts = { 2, 2, 2, 1, 1, 1, 1 }; @@ -240,11 +232,10 @@ public void testAggs() { // histogram agg { - SearchResponse response = client().prepareSearch("idx") - .setSize(0) + SearchResponse response = prepareSearch("idx").setSize(0) .addAggregation(histogram("ul_histo").field("ul_field").interval(9E18).minDocCount(0)) .get(); - assertSearchResponse(response); + assertNoFailures(response); Histogram histo = response.getAggregations().get("ul_histo"); long[] expectedBucketDocCounts = { 3, 3, 4 }; @@ -259,13 +250,12 @@ public void testAggs() { // range agg { - SearchResponse response = client().prepareSearch("idx") - .setSize(0) + SearchResponse response = prepareSearch("idx").setSize(0) .addAggregation( range("ul_range").field("ul_field").addUnboundedTo(9.0E18).addRange(9.0E18, 1.8E19).addUnboundedFrom(1.8E19) ) .get(); - assertSearchResponse(response); + assertNoFailures(response); Range range = response.getAggregations().get("ul_range"); long[] expectedBucketDocCounts = { 3, 3, 4 }; @@ -280,23 +270,23 @@ public void testAggs() { // sum agg { - SearchResponse response = client().prepareSearch("idx").setSize(0).addAggregation(sum("ul_sum").field("ul_field")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(sum("ul_sum").field("ul_field")).get(); + 
assertNoFailures(response); Sum sum = response.getAggregations().get("ul_sum"); double expectedSum = Arrays.stream(values).mapToDouble(Number::doubleValue).sum(); assertEquals(expectedSum, sum.value(), 0.001); } // max agg { - SearchResponse response = client().prepareSearch("idx").setSize(0).addAggregation(max("ul_max").field("ul_field")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(max("ul_max").field("ul_field")).get(); + assertNoFailures(response); Max max = response.getAggregations().get("ul_max"); assertEquals(1.8446744073709551615E19, max.value(), 0.001); } // min agg { - SearchResponse response = client().prepareSearch("idx").setSize(0).addAggregation(min("ul_min").field("ul_field")).get(); - assertSearchResponse(response); + SearchResponse response = prepareSearch("idx").setSize(0).addAggregation(min("ul_min").field("ul_field")).get(); + assertNoFailures(response); Min min = response.getAggregations().get("ul_min"); assertEquals(0, min.value(), 0.001); } @@ -305,11 +295,7 @@ public void testAggs() { public void testSortDifferentFormatsShouldFail() { Exception exception = expectThrows( SearchPhaseExecutionException.class, - () -> client().prepareSearch() - .setIndices("idx", "idx2") - .setQuery(QueryBuilders.matchAllQuery()) - .addSort("ul_field", SortOrder.ASC) - .get() + () -> prepareSearch().setIndices("idx", "idx2").setQuery(QueryBuilders.matchAllQuery()).addSort("ul_field", SortOrder.ASC).get() ); assertEquals( exception.getCause().getMessage(), @@ -318,17 +304,15 @@ public void testSortDifferentFormatsShouldFail() { } public void testRangeQuery() { - SearchResponse response = client().prepareSearch("idx") - .setSize(0) + SearchResponse response = prepareSearch("idx").setSize(0) .setQuery(new RangeQueryBuilder("ul_field").to("9.0E18").includeUpper(false)) .get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - response = client().prepareSearch("idx") - .setSize(0) + response = prepareSearch("idx").setSize(0) .setQuery(new RangeQueryBuilder("ul_field").from("9.0E18").to("1.8E19").includeUpper(false)) .get(); assertThat(response.getHits().getTotalHits().value, equalTo(3L)); - response = client().prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("1.8E19")).get(); + response = prepareSearch("idx").setSize(0).setQuery(new RangeQueryBuilder("ul_field").from("1.8E19")).get(); assertThat(response.getHits().getTotalHits().value, equalTo(4L)); } } diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java index 9e88c516576c2..f4fb83fd9a91c 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapper.java @@ -39,6 +39,8 @@ import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -290,6 +292,12 @@ protected 
BytesRef indexedValueForSearch(Object value) { return encodeVersion(valueAsString).bytesRef; } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + failIfNoDocValues(); + return BlockDocValuesReader.bytesRefsFromOrds(name()); + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { return new SortedSetOrdinalsIndexFieldData.Builder(name(), CoreValuesSourceType.KEYWORD, VersionStringDocValuesField::new); diff --git a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java index d94a632ff9787..5653ed7f4302f 100644 --- a/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java +++ b/x-pack/plugin/mapper-version/src/test/java/org/elasticsearch/xpack/versionfield/VersionStringFieldMapperTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Tuple; @@ -31,6 +32,7 @@ import java.util.Collections; import java.util.Comparator; import java.util.List; +import java.util.function.Function; import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; @@ -185,6 +187,11 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) return new VersionStringSyntheticSourceSupport(); } + @Override + protected Function loadBlockExpected() { + return v -> new Version((BytesRef) v).toString(); + } + static class VersionStringSyntheticSourceSupport implements SyntheticSourceSupport { @Override public SyntheticSourceExample example(int maxValues) { diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index 368aab4615fc7..1cc37f5c4ffc0 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -39,7 +40,7 @@ testClusters.register('mixed-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'cluster.remote.my_remote_cluster.seeds', { remoteCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE setting 'cluster.remote.connections_per_cluster', "1" user username: "test_user", password: "x-pack-test-password" diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java index a2fa26914e083..f0b23b087537b 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java @@ -303,9 +303,9 @@ public void testCategorizationStatePersistedOnSwitchToRealtime() throws Exceptio // before closing the job to prove that it was persisted in the background at the // end of lookback rather than when the job was closed. assertBusy(() -> { - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setQuery(QueryBuilders.idsQuery().addIds(CategorizerState.documentId(job.getId(), 1))) - .get(); + SearchResponse stateDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery( + QueryBuilders.idsQuery().addIds(CategorizerState.documentId(job.getId(), 1)) + ).get(); SearchHit[] hits = stateDocsResponse.getHits().getHits(); assertThat(hits, arrayWithSize(1)); @@ -554,14 +554,11 @@ private static Job.Builder newJobBuilder(String id, List categorizationF private List getCategorizerStats(String jobId) throws IOException { - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) - .setQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), CategorizerStats.RESULT_TYPE_VALUE)) - .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) - ) - .setSize(1000) - .get(); + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).setQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), CategorizerStats.RESULT_TYPE_VALUE)) + .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) + ).setSize(1000).get(); List stats = new ArrayList<>(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java index ee15832d68488..610a492ef078f 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationHousePricingIT.java @@ -1566,7 +1566,7 @@ public void testFeatureImportanceValues() throws Exception { waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); // obtain addition information for investigation of #90599 String modelId = getModelId(jobId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationIT.java index 3ddb05fd0218f..0b1edd0f35538 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ClassificationIT.java @@ -145,7 +145,7 
@@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -210,7 +210,7 @@ public void testWithDatastreams() throws Exception { waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -259,7 +259,7 @@ public void testWithOnlyTrainingRowsAndTrainingPercentIsHundred() throws Excepti waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -348,7 +348,7 @@ public void testWithCustomFeatureProcessors() throws Exception { waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -425,7 +425,7 @@ public void testWithOnlyTrainingRowsAndTrainingPercentIsFifty( int trainingRowsCount = 0; int nonTrainingRowsCount = 0; client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -569,7 +569,7 @@ public void testStopAndRestart() throws Exception { waitUntilAnalyticsIsStopped(jobId); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); @@ -840,7 +840,7 @@ public void testDeleteExpiredData_RemovesUnusedState() throws Exception { // Now calling the _delete_expired_data API should remove unused state assertThat(deleteExpiredData().isDeleted(), is(true)); - SearchResponse stateIndexSearchResponse = client().prepareSearch(".ml-state*").execute().actionGet(); + SearchResponse stateIndexSearchResponse = 
prepareSearch(".ml-state*").execute().actionGet(); assertThat(stateIndexSearchResponse.getHits().getTotalHits().value, equalTo(0L)); } @@ -928,7 +928,7 @@ public void testWithSearchRuntimeMappings() throws Exception { waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse destData = client().prepareSearch(destIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse destData = prepareSearch(destIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : destData.getHits()) { Map destDoc = hit.getSourceAsMap(); Map resultsObject = getFieldValue(destDoc, "ml"); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalysisCustomFeatureIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalysisCustomFeatureIT.java index 91d302c381f5b..153f3aa46c359 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalysisCustomFeatureIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalysisCustomFeatureIT.java @@ -134,7 +134,7 @@ public void testNGramCustomFeature() throws Exception { waitUntilAnalyticsIsStopped(jobId); client().admin().indices().refresh(new RefreshRequest(destIndex)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getFieldValue(destDoc, "ml"); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java index dab0bb54a6762..9773a4d3b3d82 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedWithAggsIT.java @@ -164,21 +164,14 @@ private void testDfWithAggs(AggregatorFactories.Builder aggs, Detector.Builder d ); // Confirm that it's possible to search for the same buckets by @timestamp - proves that @timestamp works as a field alias assertThat( - client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) - .setQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("job_id", jobId)) - .filter(QueryBuilders.termQuery("result_type", "bucket")) - .filter( - QueryBuilders.rangeQuery("@timestamp") - .gte(bucket.getTimestamp().getTime()) - .lte(bucket.getTimestamp().getTime()) - ) - ) - .setTrackTotalHits(true) - .get() - .getHits() - .getTotalHits().value, + prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).setQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("job_id", jobId)) + .filter(QueryBuilders.termQuery("result_type", "bucket")) + .filter( + QueryBuilders.rangeQuery("@timestamp").gte(bucket.getTimestamp().getTime()).lte(bucket.getTimestamp().getTime()) + ) + ).setTrackTotalHits(true).get().getHits().getTotalHits().value, equalTo(1L) ); } diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 15026a719590e..0bdf68cdbb5aa 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -268,14 +268,12 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw retainAllSnapshots("snapshots-retention-with-retain"); - long totalModelSizeStatsBeforeDelete = client().prepareSearch("*") - .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + long totalModelSizeStatsBeforeDelete = prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) .get() .getHits() .getTotalHits().value; - long totalNotificationsCountBeforeDelete = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .get() + long totalNotificationsCountBeforeDelete = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get() .getHits() .getTotalHits().value; assertThat(totalModelSizeStatsBeforeDelete, greaterThan(0L)); @@ -321,14 +319,12 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw assertThat(getRecords("results-and-snapshots-retention").size(), equalTo(0)); assertThat(getModelSnapshots("results-and-snapshots-retention").size(), equalTo(1)); - long totalModelSizeStatsAfterDelete = client().prepareSearch("*") - .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + long totalModelSizeStatsAfterDelete = prepareSearch("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.termQuery("result_type", "model_size_stats")) .get() .getHits() .getTotalHits().value; - long totalNotificationsCountAfterDelete = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .get() + long totalNotificationsCountAfterDelete = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).get() .getHits() .getTotalHits().value; assertThat(totalModelSizeStatsAfterDelete, equalTo(totalModelSizeStatsBeforeDelete)); @@ -347,8 +343,7 @@ private void testExpiredDeletion(Float customThrottle, int numUnusedState) throw } // Verify .ml-state doesn't contain unused state documents - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) + SearchResponse stateDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false) .setTrackTotalHits(true) .setSize(10000) .get(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index a8825ea46774c..079443f5cf422 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -186,8 +186,7 @@ public void testScope() throws Exception { // Wait until the notification that 
the filter was updated is indexed assertBusy(() -> { - SearchResponse searchResponse = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .setSize(1) + SearchResponse searchResponse = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery( QueryBuilders.boolQuery() diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java index 4cc4811c5bfe0..2a76fffc5559e 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java @@ -108,7 +108,7 @@ private static Map createRecord(long timestamp, String byFieldVa private void assertNoInterimResults(String jobId) { String indexName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); - SearchResponse search = client().prepareSearch(indexName).setSize(1000).setQuery(QueryBuilders.termQuery("is_interim", true)).get(); + SearchResponse search = prepareSearch(indexName).setSize(1000).setQuery(QueryBuilders.termQuery("is_interim", true)).get(); assertThat(search.getHits().getTotalHits().value, equalTo(0L)); } } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 5218dafc1af10..c57a983c2c61d 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -286,9 +286,9 @@ protected void assertThatNumberOfAnnotationsIsEqualTo(int expectedNumberOfAnnota } protected ForecastRequestStats getForecastStats(String jobId, String forecastId) { - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) - .setQuery(QueryBuilders.idsQuery().addIds(ForecastRequestStats.documentId(jobId, forecastId))) - .get(); + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).setQuery( + QueryBuilders.idsQuery().addIds(ForecastRequestStats.documentId(jobId, forecastId)) + ).get(); if (searchResponse.getHits().getHits().length == 0) { return null; @@ -313,8 +313,7 @@ protected ForecastRequestStats getForecastStats(String jobId, String forecastId) protected List getForecastStats() { List forecastStats = new ArrayList<>(); - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") - .setSize(1000) + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*").setSize(1000) .setQuery( QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), ForecastRequestStats.RESULT_TYPE_VALUE)) @@ -339,22 +338,20 @@ protected List getForecastStats() { } protected long countForecastDocs(String jobId, String forecastId) 
{ - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") - .setQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), Forecast.RESULT_TYPE_VALUE)) - .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) - .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) - ) - .execute() - .actionGet(); + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*").setQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), Forecast.RESULT_TYPE_VALUE)) + .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) + .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) + ).execute().actionGet(); return searchResponse.getHits().getTotalHits().value; } protected List getForecasts(String jobId, ForecastRequestStats forecastRequestStats) { List forecasts = new ArrayList<>(); - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") - .setSize((int) forecastRequestStats.getRecordCount()) + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*").setSize( + (int) forecastRequestStats.getRecordCount() + ) .setQuery( QueryBuilders.boolQuery() .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), Forecast.RESULT_TYPE_VALUE)) diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index 3894c46e7a818..628400120bd41 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -248,7 +248,7 @@ protected List getProgress(String id) { protected SearchResponse searchStoredProgress(String jobId) { String docId = StoredProgress.documentId(jobId); - return client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery(QueryBuilders.idsQuery().addIds(docId)).get(); + return prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery(QueryBuilders.idsQuery().addIds(docId)).get(); } protected void assertExactlyOneInferenceModelPersisted(String jobId) { @@ -260,9 +260,9 @@ protected void assertAtLeastOneInferenceModelPersisted(String jobId) { } private void assertInferenceModelPersisted(String jobId, Matcher modelHitsArraySizeMatcher) { - SearchResponse searchResponse = client().prepareSearch(InferenceIndexConstants.LATEST_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), jobId))) - .get(); + SearchResponse searchResponse = prepareSearch(InferenceIndexConstants.LATEST_INDEX_NAME).setQuery( + QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), jobId)) + ).get(); // If the job is stopped during writing_results phase and it is then restarted, there is a chance two trained models // were persisted as there is no way currently for the process to be certain the model was persisted. 
assertThat( @@ -294,23 +294,20 @@ protected void waitUntilSomeProgressHasBeenMadeForPhase(String jobId, String pha } protected String getModelId(String jobId) { - SearchResponse searchResponse = client().prepareSearch(InferenceIndexConstants.LATEST_INDEX_NAME) - .setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), jobId))) - .get(); + SearchResponse searchResponse = prepareSearch(InferenceIndexConstants.LATEST_INDEX_NAME).setQuery( + QueryBuilders.boolQuery().filter(QueryBuilders.termQuery(TrainedModelConfig.TAGS.getPreferredName(), jobId)) + ).get(); assertThat(searchResponse.getHits().getHits(), arrayWithSize(1)); return searchResponse.getHits().getHits()[0].getId(); } protected TrainedModelMetadata getModelMetadata(String modelId) { - SearchResponse response = client().prepareSearch(InferenceIndexConstants.INDEX_PATTERN) - .setQuery( - QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery("model_id", modelId)) - .filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelMetadata.NAME)) - ) - .setSize(1) - .get(); + SearchResponse response = prepareSearch(InferenceIndexConstants.INDEX_PATTERN).setQuery( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("model_id", modelId)) + .filter(QueryBuilders.termQuery(InferenceIndexConstants.DOC_TYPE.getPreferredName(), TrainedModelMetadata.NAME)) + ).setSize(1).get(); assertThat(response.getHits().getHits(), arrayWithSize(1)); try ( @@ -364,7 +361,7 @@ protected static void assertThatAuditMessagesMatch(String configId, String... ex protected static Set getTrainingRowsIds(String index) { Set trainingRowsIds = new HashSet<>(); - SearchResponse hits = client().prepareSearch(index).setSize(10000).get(); + SearchResponse hits = prepareSearch(index).setSize(10000).get(); for (SearchHit hit : hits.getHits()) { Map sourceAsMap = hit.getSourceAsMap(); assertThat(sourceAsMap.containsKey("ml"), is(true)); @@ -381,9 +378,9 @@ protected static Set getTrainingRowsIds(String index) { } protected static void assertModelStatePersisted(String stateDocId) { - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setQuery(QueryBuilders.idsQuery().addIds(stateDocId)) - .get(); + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery( + QueryBuilders.idsQuery().addIds(stateDocId) + ).get(); assertThat("Hits were: " + Strings.toString(searchResponse.getHits()), searchResponse.getHits().getHits(), is(arrayWithSize(1))); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index bf5298337dab4..46a4e008cc752 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.core.ilm.LifecycleSettings; import org.elasticsearch.xpack.core.ilm.LifecycleType; import org.elasticsearch.xpack.core.ilm.RolloverAction; +import org.elasticsearch.xpack.core.ilm.ShrinkAction; import org.elasticsearch.xpack.core.ilm.TimeseriesLifecycleType; import 
org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.MlMetaIndex; @@ -319,6 +320,7 @@ protected void ensureClusterStateConsistency() throws IOException { entries.add(new NamedWriteableRegistry.Entry(LifecycleAction.class, DeleteAction.NAME, DeleteAction::readFrom)); entries.add(new NamedWriteableRegistry.Entry(LifecycleAction.class, ForceMergeAction.NAME, ForceMergeAction::new)); entries.add(new NamedWriteableRegistry.Entry(LifecycleAction.class, RolloverAction.NAME, RolloverAction::read)); + entries.add(new NamedWriteableRegistry.Entry(LifecycleAction.class, ShrinkAction.NAME, ShrinkAction::new)); entries.add( new NamedWriteableRegistry.Entry( PersistentTaskParams.class, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java index 36d5c71f399a5..d2fa397ab5d6b 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java @@ -159,10 +159,9 @@ private static DatafeedConfig newDatafeed(String datafeedId, String jobId) { } private Set modelPlotTerms(String jobId, String fieldName) { - SearchResponse searchResponse = client().prepareSearch(".ml-anomalies-" + jobId) - .setQuery(QueryBuilders.termQuery("result_type", "model_plot")) - .addAggregation(AggregationBuilders.terms("model_plot_terms").field(fieldName)) - .get(); + SearchResponse searchResponse = prepareSearch(".ml-anomalies-" + jobId).setQuery( + QueryBuilders.termQuery("result_type", "model_plot") + ).addAggregation(AggregationBuilders.terms("model_plot_terms").field(fieldName)).get(); Terms aggregation = searchResponse.getAggregations().get("model_plot_terms"); return aggregation.getBuckets().stream().map(agg -> agg.getKeyAsString()).collect(Collectors.toSet()); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/OutlierDetectionWithMissingFieldsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/OutlierDetectionWithMissingFieldsIT.java index 2265ad8e934ce..4433193e306f6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/OutlierDetectionWithMissingFieldsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/OutlierDetectionWithMissingFieldsIT.java @@ -89,7 +89,7 @@ public void testMissingFields() throws Exception { assertThat(stats.getDataCounts().getTestDocsCount(), equalTo(0L)); assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(2L)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); for (SearchHit hit : sourceData.getHits()) { GetResponse destDocGetResponse = client().prepareGet().setIndex(config.getDest().getIndex()).setId(hit.getId()).get(); assertThat(destDocGetResponse.isExists(), is(true)); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java 
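Note on the MlNativeIntegTestCase change above: ensureClusterStateConsistency round-trips the cluster state through serialization, and a named writeable can only be read back if a reader is registered for its name, hence the new ShrinkAction entry alongside the existing lifecycle actions. A simplified sketch of the lookup such a registry performs, using stand-in types rather than the real NamedWriteableRegistry API:

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

final class SimpleNamedRegistry<T> {
    private final Map<String, Function<byte[], T>> readers = new HashMap<>();

    void register(String name, Function<byte[], T> reader) {
        if (readers.putIfAbsent(name, reader) != null) {
            throw new IllegalArgumentException("duplicate reader for [" + name + "]");
        }
    }

    T read(String name, byte[] payload) {
        Function<byte[], T> reader = readers.get(name);
        if (reader == null) {
            // The failure mode the new entry prevents: a serialized lifecycle
            // action whose name has no registered reader.
            throw new IllegalStateException("unknown named writeable [" + name + "]");
        }
        return reader.apply(payload);
    }
}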
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 57702d205a31b..ad0d9135ebb96 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -64,8 +64,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t long job1CloseTime = System.currentTimeMillis() / 1000; // Check that state has been persisted - SearchResponse stateDocsResponse1 = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) + SearchResponse stateDocsResponse1 = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false) .setTrackTotalHits(true) .setSize(10000) .get(); @@ -104,8 +103,7 @@ public void testPersistJobOnGracefulShutdown_givenTimeAdvancedAfterNoNewData() t closeJob(jobId); // Check that a new state record exists. - SearchResponse stateDocsResponse2 = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) + SearchResponse stateDocsResponse2 = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false) .setTrackTotalHits(true) .setSize(10000) .get(); @@ -143,8 +141,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws closeJob(jobId); // Check that state has been persisted - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) + SearchResponse stateDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false) .setTrackTotalHits(true) .setSize(10000) .get(); @@ -170,8 +167,7 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndTimeAdvanced() throws closeJob(jobId); deleteJob(jobId); - stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setFetchSource(false) + stateDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false) .setTrackTotalHits(true) .setSize(10000) .get(); @@ -199,11 +195,11 @@ public void testPersistJobOnGracefulShutdown_givenNoDataAndNoTimeAdvance() throw closeJob(jobId); // Check that state has not been persisted - SearchResponse stateDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).get(); + SearchResponse stateDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).get(); assertThat(Arrays.asList(stateDocsResponse.getHits().getHits()), empty()); // Check that results have not been persisted - SearchResponse resultsDocsResponse = client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get(); + SearchResponse resultsDocsResponse = prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)).get(); assertThat(Arrays.asList(resultsDocsResponse.getHits().getHits()), empty()); deleteJob(jobId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index 993a65cf45bcf..7b977640e9b3a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ 
b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -117,7 +117,7 @@ public void testSingleNumericFeatureAndMixedTrainingAndNonTrainingRows() throws // for debugging List> badDocuments = new ArrayList<>(); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getMlResultsObjectFromDestDoc(destDoc); @@ -207,7 +207,7 @@ public void testWithOnlyTrainingRowsAndTrainingPercentIsHundred() throws Excepti startAnalytics(jobId); waitUntilAnalyticsIsStopped(jobId); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit)); @@ -266,7 +266,7 @@ public void testWithOnlyTrainingRowsAndTrainingPercentIsFifty() throws Exception int trainingRowsCount = 0; int nonTrainingRowsCount = 0; - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit)); @@ -344,7 +344,7 @@ public void testStopAndRestart() throws Exception { waitUntilAnalyticsIsStopped(jobId); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map resultsObject = getMlResultsObjectFromDestDoc(getDestDoc(config, hit)); @@ -440,7 +440,7 @@ public void testDeleteExpiredData_RemovesUnusedState() throws Exception { // Now calling the _delete_expired_data API should remove unused state assertThat(deleteExpiredData().isDeleted(), is(true)); - SearchResponse stateIndexSearchResponse = client().prepareSearch(".ml-state*").execute().actionGet(); + SearchResponse stateIndexSearchResponse = prepareSearch(".ml-state*").execute().actionGet(); assertThat(stateIndexSearchResponse.getHits().getTotalHits().value, equalTo(0L)); } @@ -498,7 +498,7 @@ public void testWithDatastream() throws Exception { startAnalytics(jobId); waitUntilAnalyticsIsStopped(jobId); - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); Map resultsObject = getMlResultsObjectFromDestDoc(destDoc); @@ -607,7 +607,7 @@ public void testAliasFields() throws Exception { double predictionErrorSum = 0.0; - SearchResponse sourceData = client().prepareSearch(sourceIndex).setSize(totalDocCount).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setSize(totalDocCount).get(); StringBuilder targetsPredictions = new StringBuilder(); // used to investigate #90599 for (SearchHit hit : sourceData.getHits()) { Map destDoc = getDestDoc(config, hit); @@ -704,7 +704,7 @@ public void 
testWithCustomFeatureProcessors() throws Exception { waitUntilAnalyticsIsStopped(jobId); // for debugging - SearchResponse sourceData = client().prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : sourceData.getHits()) { Map<String, Object> destDoc = getDestDoc(config, hit); Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc); @@ -795,7 +795,7 @@ public void testWithSearchRuntimeMappings() throws Exception { startAnalytics(jobId); waitUntilAnalyticsIsStopped(jobId); - SearchResponse destData = client().prepareSearch(destIndex).setTrackTotalHits(true).setSize(1000).get(); + SearchResponse destData = prepareSearch(destIndex).setTrackTotalHits(true).setSize(1000).get(); for (SearchHit hit : destData.getHits()) { Map<String, Object> destDoc = hit.getSourceAsMap(); Map<String, Object> resultsObject = getMlResultsObjectFromDestDoc(destDoc); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 4e82720532454..90614703cc171 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -289,8 +289,7 @@ record = new HashMap<>(); } private Quantiles getQuantiles(String jobId) { - SearchResponse response = client().prepareSearch(".ml-state*") - .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))) + SearchResponse response = prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))) .setSize(1) .get(); SearchHits hits = response.getHits(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index b495eb860b75d..3910ab0c1c523 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -102,7 +102,7 @@ public void testOutlierDetectionWithFewDocuments() throws Exception { assertThat(stats.getDataCounts().getTestDocsCount(), equalTo(0L)); assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(0L)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); double scoreOfOutlier = 0.0; double scoreOfNonOutlier = -1.0; for (SearchHit hit : sourceData.getHits()) { @@ -230,12 +230,11 @@ public void testOutlierDetectionWithEnoughDocumentsToScroll() { waitUntilAnalyticsIsStopped(id); // Check we've got all docs - SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + SearchResponse searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docCount)); // Check they all have an outlier_score
- searchResponse = client().prepareSearch(config.getDest().getIndex()) - .setTrackTotalHits(true) + searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docCount)); @@ -312,7 +311,7 @@ public void testOutlierDetectionWithMoreFieldsThanDocValueFieldLimit() throws Ex startAnalytics(id); waitUntilAnalyticsIsStopped(id); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); for (SearchHit hit : sourceData.getHits()) { GetResponse destDocGetResponse = client().prepareGet().setIndex(config.getDest().getIndex()).setId(hit.getId()).get(); assertThat(destDocGetResponse.isExists(), is(true)); @@ -392,10 +391,9 @@ public void testStopOutlierDetectionWithEnoughDocumentsToScroll() throws Excepti return; } - SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + SearchResponse searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); if (searchResponse.getHits().getTotalHits().value == docCount) { - searchResponse = client().prepareSearch(config.getDest().getIndex()) - .setTrackTotalHits(true) + searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) .get(); logger.debug("We stopped during analysis: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); @@ -459,12 +457,11 @@ public void testOutlierDetectionWithMultipleSourceIndices() throws Exception { waitUntilAnalyticsIsStopped(id); // Check we've got all docs - SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + SearchResponse searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); // Check they all have an outlier_score - searchResponse = client().prepareSearch(config.getDest().getIndex()) - .setTrackTotalHits(true) + searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("ml.outlier_score")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); @@ -523,12 +520,11 @@ public void testOutlierDetectionWithPreExistingDestIndex() throws Exception { waitUntilAnalyticsIsStopped(id); // Check we've got all docs - SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + SearchResponse searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); // Check they all have an outlier_score - searchResponse = client().prepareSearch(config.getDest().getIndex()) - .setTrackTotalHits(true) + searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("ml.outlier_score")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) bulkRequestBuilder.numberOfActions())); @@ -690,12 +686,11 @@ public void testOutlierDetectionStopAndRestart() throws Exception { waitUntilAnalyticsIsStopped(id); // 
Check we've got all docs - SearchResponse searchResponse = client().prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); + SearchResponse searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true).get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docCount)); // Check they all have an outlier_score - searchResponse = client().prepareSearch(config.getDest().getIndex()) - .setTrackTotalHits(true) + searchResponse = prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) .get(); assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docCount)); @@ -750,7 +745,7 @@ public void testOutlierDetectionWithCustomParams() throws Exception { startAnalytics(id); waitUntilAnalyticsIsStopped(id); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); double scoreOfOutlier = 0.0; double scoreOfNonOutlier = -1.0; for (SearchHit hit : sourceData.getHits()) { @@ -859,7 +854,7 @@ public void testOutlierDetection_GivenIndexWithRuntimeFields() throws Exception assertThat(stats.getDataCounts().getTestDocsCount(), equalTo(0L)); assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(0L)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); double scoreOfOutlier = 0.0; double scoreOfNonOutlier = -1.0; for (SearchHit hit : sourceData.getHits()) { @@ -964,7 +959,7 @@ public void testOutlierDetection_GivenSearchRuntimeMappings() throws Exception { assertThat(stats.getDataCounts().getTestDocsCount(), equalTo(0L)); assertThat(stats.getDataCounts().getSkippedDocsCount(), equalTo(0L)); - SearchResponse sourceData = client().prepareSearch(sourceIndex).get(); + SearchResponse sourceData = prepareSearch(sourceIndex).get(); double scoreOfOutlier = 0.0; double scoreOfNonOutlier = -1.0; for (SearchHit hit : sourceData.getHits()) { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java index eeb8e54892380..45d1e57a52f46 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java @@ -248,8 +248,7 @@ public void testAddEventsToOpenJob() throws Exception { // Wait until the notification that the process was updated is indexed assertBusy(() -> { - SearchResponse searchResponse = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .setSize(1) + SearchResponse searchResponse = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery( QueryBuilders.boolQuery() @@ -335,8 +334,7 @@ public void testAddOpenedJobToGroupWithCalendar() throws Exception { // Wait until the notification that the job was updated is indexed assertBusy(() -> { - SearchResponse searchResponse = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .setSize(1) + SearchResponse searchResponse = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery( 
QueryBuilders.boolQuery() @@ -422,8 +420,7 @@ public void testNewJobWithGlobalCalendar() throws Exception { // Wait until the notification that the job was updated is indexed assertBusy(() -> { - SearchResponse searchResponse = client().prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX) - .setSize(1) + SearchResponse searchResponse = prepareSearch(NotificationsIndex.NOTIFICATIONS_INDEX).setSize(1) .addSort("timestamp", SortOrder.DESC) .setQuery( QueryBuilders.boolQuery() diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java index 5980e04acbf9c..3f2ca0703bbdc 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/AnnotationIndexIT.java @@ -14,9 +14,7 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -38,6 +36,7 @@ import java.util.Map; import java.util.Set; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -283,12 +282,7 @@ public void testNotCreatedWhenAfterOtherMlIndexAndUpgradeInProgress() throws Exc try { assertBusy(() -> { - try { - SearchResponse response = client().search(new SearchRequest(".ml-notifications*")).actionGet(); - assertEquals(1, response.getHits().getHits().length); - } catch (SearchPhaseExecutionException e) { - throw new AssertionError("Notifications index exists but shards not yet ready - continuing busy wait", e); - } + assertHitCount(client().search(new SearchRequest(".ml-notifications*")), 1); assertFalse(annotationsIndexExists(AnnotationIndex.LATEST_INDEX_NAME)); assertEquals(0, numberOfAnnotationsAliases()); }); @@ -316,8 +310,7 @@ public void testNotCreatedWhenAfterOtherMlIndexAndResetInProgress() throws Excep // to be created, but in this case it shouldn't as we're doing a reset assertBusy(() -> { - SearchResponse response = client().search(new SearchRequest(".ml-state")).actionGet(); - assertEquals(1, response.getHits().getHits().length); + assertHitCount(client().search(new SearchRequest(".ml-state")), 1); assertFalse(annotationsIndexExists(AnnotationIndex.LATEST_INDEX_NAME)); assertEquals(0, numberOfAnnotationsAliases()); }); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java index 309ca2211b1c7..d356fe49f9120 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextAggregationIT.java @@ -39,8 +39,7 @@ public void setupCluster() { 
} public void testAggregation() { - SearchResponse response = client().prepareSearch(DATA_INDEX) - .setSize(0) + SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) .setTrackTotalHits(false) .addAggregation( new CategorizeTextAggregationBuilder("categorize", "msg").subAggregation(AggregationBuilders.max("max").field("time")) @@ -57,8 +56,7 @@ public void testAggregation() { } public void testAggregationWithOnlyOneBucket() { - SearchResponse response = client().prepareSearch(DATA_INDEX) - .setSize(0) + SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) .setTrackTotalHits(false) .addAggregation( new CategorizeTextAggregationBuilder("categorize", "msg").size(1) @@ -73,8 +71,7 @@ public void testAggregationWithOnlyOneBucket() { } public void testAggregationWithBroadCategories() { - SearchResponse response = client().prepareSearch(DATA_INDEX) - .setSize(0) + SearchResponse response = prepareSearch(DATA_INDEX).setSize(0) .setTrackTotalHits(false) .addAggregation( // Overriding the similarity threshold to just 11% (default is 70%) results in the diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextDistributedIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextDistributedIT.java index 6f32acd5f08d8..c91b2e8d5b540 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextDistributedIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/CategorizeTextDistributedIT.java @@ -82,11 +82,9 @@ public void testDistributedCategorizeText() { .collect(Collectors.toSet()); assertThat(nodesWithShards, hasSize(internalCluster().size())); - SearchResponse searchResponse = client().prepareSearch(indexName) - .addAggregation(new CategorizeTextAggregationBuilder("categories", "message")) - .setSize(0) - .execute() - .actionGet(); + SearchResponse searchResponse = prepareSearch(indexName).addAggregation( + new CategorizeTextAggregationBuilder("categories", "message") + ).setSize(0).execute().actionGet(); InternalCategorizationAggregation aggregation = searchResponse.getAggregations().get("categories"); assertThat(aggregation, notNullValue()); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java index c1a74444d19ce..aa8b29228b790 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobStorageDeletionTaskIT.java @@ -203,8 +203,7 @@ public void testDeleteDedicatedJobWithDataInShared() throws Exception { // Make sure all results referencing the dedicated job are gone assertThat( - client().prepareSearch() - .setIndices(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") + prepareSearch().setIndices(AnomalyDetectorsIndex.jobResultsIndexPrefix() + "*") .setIndicesOptions(IndicesOptions.lenientExpandOpenHidden()) .setTrackTotalHits(true) .setSize(0) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 777d563314887..685592afef167 100644 --- 
a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -751,8 +751,7 @@ private void run(String jobId, CheckedRunnable<Exception> disrupt) throws Except // so when restarting job on another node the data counts // are what we expect them to be: private static DataCounts getDataCountsFromIndex(String jobId) { - SearchResponse searchResponse = client().prepareSearch() - .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) + SearchResponse searchResponse = prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))) .get(); if (searchResponse.getHits().getTotalHits().value != 1) { diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java index e2dc111007aac..6a3ff6551503f 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/NetworkDisruptionIT.java @@ -81,12 +81,9 @@ public void testJobRelocation() throws Exception { assertEquals(newJobNode, finalJobNode); // The job running on the original node should have been killed, and hence should not have persisted quantiles - SearchResponse searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId()))) - .setTrackTotalHits(true) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute() - .actionGet(); + SearchResponse searchResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery( + QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId())) + ).setTrackTotalHits(true).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().actionGet(); assertEquals(0L, searchResponse.getHits().getTotalHits().value); CloseJobAction.Request closeJobRequest = new CloseJobAction.Request(job.getId()); @@ -94,12 +91,9 @@ public void testJobRelocation() throws Exception { assertTrue(closeJobResponse.isClosed()); // The relocated job was closed rather than killed, and hence should have persisted quantiles - searchResponse = client().prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) - .setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId()))) - .setTrackTotalHits(true) - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .execute() - .actionGet(); + searchResponse = prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setQuery( + QueryBuilders.idsQuery().addIds(Quantiles.documentId(job.getId())) + ).setTrackTotalHits(true).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().actionGet(); assertEquals(1L, searchResponse.getHits().getTotalHits().value); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index cb38e23c7f8eb..f4bce4906c0b0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -29,7 +29,6 @@ import 
org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -45,12 +44,10 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.AssociatedIndexDescriptor; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider; import org.elasticsearch.indices.breaker.BreakerSettings; @@ -73,15 +70,11 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.ShutdownAwarePlugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.ContextParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -884,22 +877,14 @@ public List<RescorerSpec<?>> getRescorers() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + Environment environment = services.environment(); + NamedXContentRegistry xContentRegistry = services.xContentRegistry(); + IndexNameExpressionResolver indexNameExpressionResolver = services.indexNameExpressionResolver(); + if (enabled == false) { // Holders for @link(MachineLearningFeatureSetUsage) which needs access to job manager and ML extension, // both empty if ML is disabled diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java index 6a67f942c0f19..cd7d4258855eb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlInfoAction.java @@ -11,11
+11,13 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.Processors; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -26,14 +28,20 @@ import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.NodeLoadDetector; import org.elasticsearch.xpack.ml.process.MlControllerHolder; +import org.elasticsearch.xpack.ml.utils.MlProcessors; import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; import java.io.IOException; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.OptionalLong; import java.util.concurrent.TimeoutException; public class TransportMlInfoAction extends HandledTransportAction<MlInfoAction.Request, MlInfoAction.Response> { @@ -137,6 +145,38 @@ private Map<String, Object> limits() { limits.put("effective_max_model_memory_limit", effectiveMaxModelMemoryLimit.getStringRep()); } limits.put("total_ml_memory", NativeMemoryCalculator.calculateTotalMlMemory(clusterSettings, nodes).getStringRep()); + + // Add processor information _if_ known with certainty. It won't be known with certainty if autoscaling is enabled. + // If we can scale up in terms of memory, assume we can also scale up in terms of processors.
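The two comments above gate the new processor limits: they are reported only when the ML nodes cannot grow any further, so the counts are certain. As a rough illustration of how a consumer might read the resulting _ml/info limits, here is a minimal sketch; the response keys mirror the limits.put(...) calls added below, while the reader class and method are hypothetical.

import java.util.Map;
import java.util.Optional;

// Minimal sketch (not part of this patch): reading the two new limits from a
// parsed _ml/info response map. Only the key names come from the patch below.
class MlInfoLimitsReader {
    @SuppressWarnings("unchecked")
    static Optional<Integer> limit(Map<String, Object> mlInfoResponse, String key) {
        Map<String, Object> limits = (Map<String, Object>) mlInfoResponse.getOrDefault("limits", Map.of());
        // "max_single_ml_node_processors" and "total_ml_processors" are absent when
        // autoscaling may still grow the ML nodes, i.e. the counts are not certain.
        return Optional.ofNullable((Integer) limits.get(key));
    }
}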
+ List<DiscoveryNode> mlNodes = nodes.stream().filter(MachineLearning::isMlNode).toList(); + if (areMlNodesBiggestSize(clusterSettings.get(MachineLearning.MAX_ML_NODE_SIZE), mlNodes)) { + Processors singleNodeProcessors = MlProcessors.getMaxMlNodeProcessors( + nodes, + clusterSettings.get(MachineLearning.ALLOCATED_PROCESSORS_SCALE) + ); + if (singleNodeProcessors.count() > 0) { + limits.put("max_single_ml_node_processors", singleNodeProcessors.roundDown()); + } + Processors totalMlProcessors = MlProcessors.getTotalMlNodeProcessors( + nodes, + clusterSettings.get(MachineLearning.ALLOCATED_PROCESSORS_SCALE) + ); + if (totalMlProcessors.count() > 0) { + int potentialExtraProcessors = Math.max(0, clusterSettings.get(MachineLearning.MAX_LAZY_ML_NODES) - mlNodes.size()) + * singleNodeProcessors.roundDown(); + limits.put("total_ml_processors", totalMlProcessors.roundDown() + potentialExtraProcessors); + } + } return limits; } + + static boolean areMlNodesBiggestSize(ByteSizeValue maxMLNodeSize, Collection<DiscoveryNode> mlNodes) { + if (maxMLNodeSize.getBytes() == 0) { + return true; + } + + OptionalLong smallestMLNode = mlNodes.stream().map(NodeLoadDetector::getNodeSize).flatMapToLong(OptionalLong::stream).min(); + + return smallestMLNode.isPresent() && smallestMLNode.getAsLong() >= maxMLNodeSize.getBytes(); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java index 2dcfdf7603c7f..5c89c29a70cdd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingResourceTracker.java @@ -141,9 +141,9 @@ static void getMemoryAndProcessors( Long jobMemory = mlMemoryTracker.getAnomalyDetectorJobMemoryRequirement(jobId); if (jobMemory == null) { - // TODO: this indicates a bug, should we indicate that the result is incomplete?
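The hunks that follow replace the TODO above: instead of skipping a job whose memory requirement is not yet known, the tracker now answers with a neutral no-scale result. A minimal sketch of that control flow, with hypothetical stand-ins for MlMemoryTracker and MlAutoscalingStats:

import java.util.List;

// Sketch only: early-exit with a neutral result when data is incomplete, rather
// than skipping the job, which could be mistaken for spare capacity and trigger
// an unwanted scale-down. MemoryTracker and Stats are hypothetical stand-ins.
final class NoScaleSketch {
    interface MemoryTracker { Long getJobMemoryRequirement(String jobId); }
    record Stats(long wantedBytes, int mlNodes, boolean noScale) {}

    static Stats compute(List<String> jobIds, MemoryTracker tracker, int numberMlNodes) {
        long wantedBytes = 0;
        for (String jobId : jobIds) {
            Long jobMemory = tracker.getJobMemoryRequirement(jobId);
            if (jobMemory == null) {
                return new Stats(0, numberMlNodes, true); // incomplete data: request no scaling
            }
            wantedBytes += jobMemory;
        }
        return new Stats(wantedBytes, numberMlNodes, false);
    }
}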
- logger.debug("could not find memory requirement for job [{}], skipping", jobId); - continue; + logger.debug("could not find memory requirement for job [{}], returning no-scale", jobId); + listener.onResponse(noScaleStats(numberMlNodes)); + return; } if (AWAITING_LAZY_ASSIGNMENT.equals(task.getAssignment())) { @@ -291,6 +291,10 @@ static void getMemoryAndProcessors( */ public static MlAutoscalingStats noScaleStats(ClusterState clusterState) { int numberMlNodes = (int) clusterState.nodes().stream().filter(node -> node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)).count(); + return noScaleStats(numberMlNodes); + } + + private static MlAutoscalingStats noScaleStats(int numberMlNodes) { return new MlAutoscalingStats( numberMlNodes, 0, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index ca9a99f2d96f5..2caf338d2a3c7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -164,8 +164,6 @@ public void clusterChanged(ClusterChangedEvent event) { } if (eventStateMinTransportVersionIsBeforeDistributedModelAllocationTransportVersion(event)) { - logger.trace("min transport version is before assignment change on " + event.state().nodes().getAllNodes().size() + " nodes"); - // we should not try to rebalance assignments while there may be nodes running on a version // prior to introducing distributed model allocation. // But we should remove routing to removed or shutting down nodes. 
@@ -240,7 +238,6 @@ public void onFailure(Exception e) { } private void removeRoutingToRemovedOrShuttingDownNodes(ClusterChangedEvent event) { - logger.trace("remove routing to removed or shutting down nodes "); if (areAssignedNodesRemoved(event)) { submitUnbatchedTask("removing routing entries for removed or shutting down nodes", new ClusterStateUpdateTask() { @Override @@ -285,7 +282,6 @@ static boolean areAssignedNodesRemoved(ClusterChangedEvent event) { // Visible for testing static ClusterState removeRoutingToUnassignableNodes(ClusterState currentState) { - logger.trace("remove routing to unassignable nodes"); Set<String> assignableNodes = getAssignableNodes(currentState).stream().map(DiscoveryNode::getId).collect(Collectors.toSet()); TrainedModelAssignmentMetadata metadata = TrainedModelAssignmentMetadata.fromState(currentState); TrainedModelAssignmentMetadata.Builder builder = TrainedModelAssignmentMetadata.builder(currentState); @@ -435,7 +431,6 @@ public void createNewModelAssignment( } public void setModelAssignmentToStopping(String modelId, ActionListener<AcknowledgedResponse> listener) { - logger.trace("set to stopping"); submitUnbatchedTask("set model assignment stopping", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -455,7 +450,6 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) } public void removeModelAssignment(String deploymentId, ActionListener<AcknowledgedResponse> listener) { - logger.trace("remove model assignments"); submitUnbatchedTask("delete model deployment assignment", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -492,7 +486,6 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) // Used by the reset action directly public void removeAllModelAssignments(ActionListener<AcknowledgedResponse> listener) { - logger.trace("remove all assignments"); submitUnbatchedTask("delete all model assignments", new ClusterStateUpdateTask() { @Override public ClusterState execute(ClusterState currentState) { @@ -525,11 +518,9 @@ private static ClusterState forceUpdate(ClusterState currentState, TrainedModelA logger.debug(() -> format("updated assignments: %s", modelAssignments.build())); Metadata.Builder metadata = Metadata.builder(currentState.metadata()); if (currentState.getMinTransportVersion().onOrAfter(RENAME_ALLOCATION_TO_ASSIGNMENT_TRANSPORT_VERSION)) { - logger.trace("putting custom new name"); metadata.putCustom(TrainedModelAssignmentMetadata.NAME, modelAssignments.build()) .removeCustom(TrainedModelAssignmentMetadata.DEPRECATED_NAME); } else { - logger.trace("putting custom old name"); metadata.putCustom(TrainedModelAssignmentMetadata.DEPRECATED_NAME, modelAssignments.buildOld()); } return ClusterState.builder(currentState).metadata(metadata).build(); @@ -625,7 +616,6 @@ ClusterState stopPlatformSpecificModelsInHeterogeneousClusters( modelToAdd.get().getModelId(), mlNodesArchitectures ); - logger.info(reasonToStop); updatedState = callSetToStopping(reasonToStop, modelToAdd.get().getDeploymentId(), clusterState); } return updatedState; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java index 550742ef04b5a..139eceb09bd02 100.--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicator.java @@ -170,6 +170,7 @@ public void close() { killProcess(false, false); stateStreamer.cancel(); } + dataCountsReporter.writeUnreportedCounts(); autodetectResultProcessor.awaitCompletion(); } finally { onFinishHandler.accept(null, true); @@ -180,7 +181,6 @@ public void close() { try { future.get(); autodetectWorkerExecutor.shutdownNow(); - dataCountsReporter.writeUnreportedCounts(); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } catch (ExecutionException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java index 1c45c6da2bcc7..1769a7946ce80 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/MlProcessors.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.ml.utils; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.unit.Processors; import org.elasticsearch.xpack.ml.MachineLearning; @@ -44,4 +46,29 @@ public static Processors get(DiscoveryNode node, Integer allocatedProcessorScale return Processors.ZERO; } } + + public static Processors getMaxMlNodeProcessors(DiscoveryNodes nodes, Integer allocatedProcessorScale) { + Processors answer = Processors.ZERO; + for (DiscoveryNode node : nodes) { + if (node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)) { + Processors nodeProcessors = get(node, allocatedProcessorScale); + if (answer.compareTo(nodeProcessors) < 0) { + answer = nodeProcessors; + } + } + } + return answer; + } + + public static Processors getTotalMlNodeProcessors(DiscoveryNodes nodes, Integer allocatedProcessorScale) { + int total = 0; + for (DiscoveryNode node : nodes) { + if (node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)) { + Processors nodeProcessors = get(node, allocatedProcessorScale); + // Round down before summing, because ML only uses whole processors + total += nodeProcessors.roundDown(); + } + } + return Processors.of((double) total); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportMlInfoActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportMlInfoActionTests.java new file mode 100644 index 0000000000000..24f2c48d5cde0 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportMlInfoActionTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.util.Map; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.is; + +public class TransportMlInfoActionTests extends ESTestCase { + + public void testAreMlNodesBiggestSize() { + boolean expectedResult = randomBoolean(); + long mlNodeSize = randomLongBetween(10000000L, 10000000000L); + long biggestSize = expectedResult ? mlNodeSize : mlNodeSize * randomLongBetween(2, 5); + int numMlNodes = randomIntBetween(2, 4); + var nodes = Stream.generate( + () -> DiscoveryNodeUtils.builder("node") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.MACHINE_MEMORY_NODE_ATTR, Long.toString(mlNodeSize))) + .build() + ).limit(numMlNodes).toList(); + assertThat(TransportMlInfoAction.areMlNodesBiggestSize(ByteSizeValue.ofBytes(biggestSize), nodes), is(expectedResult)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java index a8d3af2efe7cd..5c98ac53c7228 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/ingest/InferenceProcessorFactoryTests.java @@ -922,8 +922,22 @@ private static ClusterState builderClusterStateWithModelReferences(MlConfigVersi Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.ML_ROLE, DiscoveryNodeRole.DATA_ROLE) ) ) - .add(DiscoveryNodeUtils.create("current_node", new TransportAddress(InetAddress.getLoopbackAddress(), 9302))) - .add(DiscoveryNodeUtils.create("_node_id", new TransportAddress(InetAddress.getLoopbackAddress(), 9304))) + .add( + DiscoveryNodeUtils.create( + "current_node", + new TransportAddress(InetAddress.getLoopbackAddress(), 9302), + Map.of(MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.CURRENT.toString()), + Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.ML_ROLE, DiscoveryNodeRole.DATA_ROLE) + ) + ) + .add( + DiscoveryNodeUtils.create( + "_node_id", + new TransportAddress(InetAddress.getLoopbackAddress(), 9304), + Map.of(MachineLearning.ML_CONFIG_VERSION_NODE_ATTR, MlConfigVersion.CURRENT.toString()), + Set.of(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.ML_ROLE, DiscoveryNodeRole.DATA_ROLE) + ) + ) .localNodeId("_node_id") .masterNodeId("_node_id") ) diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java index d34343a4dc200..59a79def9bd10 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/MockClientBuilder.java @@ -29,14 +29,10 @@ import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.search.SearchHit; import 
org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.sort.SortBuilder; -import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentBuilder; -import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -109,22 +105,6 @@ public MockClientBuilder prepareCreate(String index) { return this; } - public MockClientBuilder prepareSearch(String index, int from, int size, SearchResponse response, ArgumentCaptor<QueryBuilder> filter) { - SearchRequestBuilder builder = mock(SearchRequestBuilder.class); - when(builder.addSort(any(SortBuilder.class))).thenReturn(builder); - when(builder.setQuery(filter.capture())).thenReturn(builder); - when(builder.setPostFilter(filter.capture())).thenReturn(builder); - when(builder.setFrom(eq(from))).thenReturn(builder); - when(builder.setSize(eq(size))).thenReturn(builder); - when(builder.setFetchSource(eq(true))).thenReturn(builder); - when(builder.addDocValueField(any(String.class))).thenReturn(builder); - when(builder.addDocValueField(any(String.class), any(String.class))).thenReturn(builder); - when(builder.addSort(any(String.class), any(SortOrder.class))).thenReturn(builder); - when(builder.get()).thenReturn(response); - when(client.prepareSearch(eq(index))).thenReturn(builder); - return this; - } - public MockClientBuilder prepareSearches(String index, SearchRequestBuilder first, SearchRequestBuilder... searches) { when(client.prepareSearch(eq(index))).thenReturn(first, searches); return this; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlProcessorsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlProcessorsTests.java index 2ff3196dc87e9..b1b213e2c3f15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlProcessorsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlProcessorsTests.java @@ -7,11 +7,14 @@ package org.elasticsearch.xpack.ml.utils; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.ml.MachineLearning; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -34,4 +37,190 @@ public void testGetWithNull() { var processor = MlProcessors.get(node, null); assertThat(processor.count(), equalTo(8.0)); } + + public void testGetMaxMlNodeProcessors() { + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "8.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "7.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .build(); + var processor = MlProcessors.getMaxMlNodeProcessors(nodes, 1); + assertThat(processor.count(), equalTo(8.0)); + } + + public void testGetMaxMlNodeProcessorsWithScale()
{ + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "8.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "12.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "10.0")) + .build() + ) + .build(); + var processor = MlProcessors.getMaxMlNodeProcessors(nodes, 2); + assertThat(processor.count(), equalTo(6.0)); + } + + public void testGetMaxMlNodeProcessorsWithNull() { + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "7.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .build(); + var processor = MlProcessors.getMaxMlNodeProcessors(nodes, null); + assertThat(processor.count(), equalTo(7.0)); + } + + public void testGetTotalMlNodeProcessors() { + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "8.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "7.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .build(); + var processor = MlProcessors.getTotalMlNodeProcessors(nodes, 1); + assertThat(processor.count(), equalTo(15.0)); + } + + public void testGetTotalMlNodeProcessorsWithScale() { + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "8.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "7.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + 
.attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .build(); + var processor = MlProcessors.getTotalMlNodeProcessors(nodes, 2); + assertThat(processor.count(), equalTo(7.0)); + } + + public void testGetTotalMlNodeProcessorsWithNull() { + var nodes = DiscoveryNodes.builder() + .add( + DiscoveryNodeUtils.builder("n1") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.5")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n2") + .roles(Set.of(DiscoveryNodeRole.DATA_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "9.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n3") + .roles(Set.of(DiscoveryNodeRole.ML_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "7.0")) + .build() + ) + .add( + DiscoveryNodeUtils.builder("n4") + .roles(Set.of(DiscoveryNodeRole.MASTER_ROLE)) + .attributes(Map.of(MachineLearning.ALLOCATED_PROCESSORS_NODE_ATTR, "6.0")) + .build() + ) + .build(); + var processor = MlProcessors.getTotalMlNodeProcessors(nodes, null); + assertThat(processor.count(), equalTo(13.0)); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java index c85fda5eee4b3..939ccde7df6c4 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/TransportVersionUtilsTests.java @@ -40,6 +40,7 @@ public class TransportVersionUtilsTests extends ESTestCase { null, null, transportVersions, + null, ClusterBlocks.EMPTY_CLUSTER_BLOCK, null, false, @@ -66,6 +67,7 @@ public void testIsMinTransformVersionSameAsCurrent() { null, null, transportVersions1, + null, ClusterBlocks.EMPTY_CLUSTER_BLOCK, null, false, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java index 47e24b60896da..bab8e5b22c37a 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/Monitoring.java @@ -12,17 +12,12 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.LicensedFeature; @@ -30,14 +25,9 @@ import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; -import 
org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -118,22 +108,11 @@ protected LicenseService getLicenseService() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + final ClusterSettings clusterSettings = clusterService.getClusterSettings(); final CleanerService cleanerService = new CleanerService(settings, clusterSettings, threadPool); final SSLService dynamicSSLService = getSslService().createDynamicSSLService(); @@ -157,7 +136,14 @@ public Collection<Object> createComponents( Set<Collector> collectors = new HashSet<>(); collectors.add(new IndexStatsCollector(clusterService, getLicenseState(), client)); collectors.add( - new ClusterStatsCollector(settings, clusterService, getLicenseState(), client, getLicenseService(), expressionResolver) + new ClusterStatsCollector( + settings, + clusterService, + getLicenseState(), + client, + getLicenseService(), + services.indexNameExpressionResolver() + ) ); collectors.add(new ShardsCollector(clusterService, getLicenseState())); collectors.add(new NodeStatsCollector(clusterService, getLicenseState(), client)); @@ -175,7 +161,7 @@ public Collection<Object> createComponents( clusterService, threadPool, client, - xContentRegistry + services.xContentRegistry() ); templateRegistry.initialize(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java index 29b044e81f7d6..02c9a8e2f210c 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/MultiNodesStatsTests.java @@ -78,11 +78,9 @@ public void testMultipleNodes() throws Exception { flush(ALL_MONITORING_INDICES); refresh(); - SearchResponse response = client().prepareSearch(ALL_MONITORING_INDICES) - .setQuery(QueryBuilders.termQuery("type", NodeStatsMonitoringDoc.TYPE)) - .setSize(0) - .addAggregation(AggregationBuilders.terms("nodes_ids").field("node_stats.node_id")) - .get(); + SearchResponse response = prepareSearch(ALL_MONITORING_INDICES).setQuery( + QueryBuilders.termQuery("type", NodeStatsMonitoringDoc.TYPE) + 
).setSize(0).addAggregation(AggregationBuilders.terms("nodes_ids").field("node_stats.node_id")).get(); for (Aggregation aggregation : response.getAggregations()) { assertThat(aggregation, instanceOf(StringTerms.class)); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java index 015a8c87824f5..c782f25fdad4c 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsMonitoringDocTests.java @@ -44,6 +44,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.license.License; @@ -457,7 +458,7 @@ public void testToXContent() throws IOException { mockNodeVersion, pluginEsBuildVersion, Version.CURRENT, - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current(), apmIndicesExist }; final String expectedJson = Strings.format(""" @@ -768,7 +769,13 @@ public void testToXContent() throws IOException { "max_index_version":%s } }, - "nodes_versions": [] + "nodes_versions": [], + "nodes_features": [ + { + "node_id": "_node_id", + "features": [] + } + ] }, "cluster_settings": { "cluster": { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java index 6d16c7b84a99b..35b5578602621 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndexRecoveryMonitoringDocTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Strings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.transport.NodeDisconnectedException; @@ -100,7 +101,7 @@ public void testToXContent() throws IOException { .version( new VersionInformation( Version.CURRENT.minimumCompatibilityVersion(), - IndexVersion.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current() ) ) diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 503d83162df5e..91dff9abcc5e2 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -111,7 +111,7 @@ public void testExport() throws Exception { assertThat(indexExists(".monitoring-*"), 
is(true)); ensureYellowAndNoInitializingShards(".monitoring-*"); - SearchResponse response = client().prepareSearch(".monitoring-*").get(); + SearchResponse response = prepareSearch(".monitoring-*").get(); assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)); }); @@ -125,8 +125,7 @@ public void testExport() throws Exception { ensureYellowAndNoInitializingShards(".monitoring-*"); assertThat( - client().prepareSearch(".monitoring-es-*") - .setSize(0) + prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "cluster_stats")) .get() .getHits() @@ -135,8 +134,7 @@ public void testExport() throws Exception { ); assertThat( - client().prepareSearch(".monitoring-es-*") - .setSize(0) + prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "index_recovery")) .get() .getHits() @@ -145,8 +143,7 @@ public void testExport() throws Exception { ); assertThat( - client().prepareSearch(".monitoring-es-*") - .setSize(0) + prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "index_stats")) .get() .getHits() @@ -155,8 +152,7 @@ public void testExport() throws Exception { ); assertThat( - client().prepareSearch(".monitoring-es-*") - .setSize(0) + prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "indices_stats")) .get() .getHits() @@ -165,8 +161,7 @@ public void testExport() throws Exception { ); assertThat( - client().prepareSearch(".monitoring-es-*") - .setSize(0) + prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "shards")) .get() .getHits() @@ -174,8 +169,7 @@ public void testExport() throws Exception { greaterThan(0L) ); - SearchResponse response = client().prepareSearch(".monitoring-es-*") - .setSize(0) + SearchResponse response = prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "node_stats")) .addAggregation(terms("agg_nodes_ids").field("node_stats.node_id")) .get(); @@ -212,8 +206,7 @@ public void testExport() throws Exception { ensureYellowAndNoInitializingShards(".monitoring-*"); refresh(".monitoring-es-*"); - SearchResponse response = client().prepareSearch(".monitoring-es-*") - .setSize(0) + SearchResponse response = prepareSearch(".monitoring-es-*").setSize(0) .setQuery(QueryBuilders.termQuery("type", "node_stats")) .addAggregation( terms("agg_nodes_ids").field("node_stats.node_id").subAggregation(max("agg_last_time_collected").field("timestamp")) @@ -271,7 +264,7 @@ private void checkMonitoringDocs() { DateFormatter dateParser = DateFormatter.forPattern("strict_date_time"); DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); - SearchResponse searchResponse = client().prepareSearch(".monitoring-*").setSize(100).get(); + SearchResponse searchResponse = prepareSearch(".monitoring-*").setSize(100).get(); assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d96ee3726a032..71590943e4bf6 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -265,7 +265,7 @@ private void assertWatchesExist() { SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource() .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); Set watchIds = new HashSet<>(Arrays.asList(ClusterAlertsUtil.WATCH_IDS)); - for (SearchHit hit : client().prepareSearch(".watches").setSource(searchSource).get().getHits().getHits()) { + for (SearchHit hit : prepareSearch(".watches").setSource(searchSource).get().getHits().getHits()) { String watchId = ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap()); assertNotNull("Missing watch ID", watchId); assertTrue("found unexpected watch id", watchIds.contains(watchId)); @@ -289,7 +289,7 @@ private void assertNoWatchesExist() { String clusterUUID = clusterService().state().getMetadata().clusterUUID(); SearchSourceBuilder searchSource = SearchSourceBuilder.searchSource() .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); - SearchResponse searchResponse = client().prepareSearch(".watches").setSource(searchSource).get(); + SearchResponse searchResponse = prepareSearch(".watches").setSource(searchSource).get(); if (searchResponse.getHits().getTotalHits().value > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 5fc5d20461fe8..955cf0396326b 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -14,23 +14,17 @@ import org.elasticsearch.Build; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; @@ -41,7 +35,6 @@ import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.translog.TranslogStats; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.LicensedFeature; @@ -52,15 +45,10 @@ import 
org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotRestoreException; import org.elasticsearch.snapshots.sourceonly.SourceOnlySnapshotRepository; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -75,7 +63,6 @@ import java.util.Optional; import java.util.function.BiConsumer; import java.util.function.Function; -import java.util.function.Supplier; public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin, ActionPlugin, EnginePlugin { @@ -90,24 +77,12 @@ public class OldLuceneVersions extends Plugin implements IndexStorePlugin, Clust private final SetOnce failShardsListener = new SetOnce<>(); @Override - public Collection createComponents( - final Client client, - final ClusterService clusterService, - final ThreadPool threadPool, - final ResourceWatcherService resourceWatcherService, - final ScriptService scriptService, - final NamedXContentRegistry xContentRegistry, - final Environment environment, - final NodeEnvironment nodeEnvironment, - final NamedWriteableRegistry registry, - final IndexNameExpressionResolver resolver, - final Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); - if (DiscoveryNode.isMasterNode(environment.settings())) { + if (DiscoveryNode.isMasterNode(services.environment().settings())) { // We periodically look through the indices to identify any archive indices and mark the feature as used. // We do this on each master node so that if one master fails, the next elected master can // continue reporting usage state. diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java new file mode 100644 index 0000000000000..308e377c2826a --- /dev/null +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetFlameGraphActionIT.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.profiling; + +public class GetFlameGraphActionIT extends ProfilingTestCase { + @Override + protected boolean useOnlyAllEvents() { + return true; + } + + public void testGetStackTracesUnfiltered() throws Exception { + GetStackTracesRequest request = new GetStackTracesRequest(1, null); + GetFlamegraphResponse response = client().execute(GetFlamegraphAction.INSTANCE, request).get(); + // only spot-check top level properties - detailed tests are done in unit tests + assertEquals(4, response.getSize()); + assertEquals(1.0d, response.getSamplingRate(), 0.001d); + assertEquals(3, response.getSelfCPU()); + assertEquals(4, response.getTotalCPU()); + assertEquals(1, response.getTotalSamples()); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java new file mode 100644 index 0000000000000..32273d56d0176 --- /dev/null +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/FrameGroupID.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.common.Strings; + +import java.util.Objects; + +public final class FrameGroupID { + private static final char UNIX_PATH_SEPARATOR = '/'; + + private FrameGroupID() {} + + public static String getBasenameAndParent(String fullPath) { + if (Strings.isEmpty(fullPath)) { + return fullPath; + } + int lastSeparatorIdx = fullPath.lastIndexOf(UNIX_PATH_SEPARATOR); + if (lastSeparatorIdx <= 0) { + return fullPath; + } + int nextSeparatorIdx = fullPath.lastIndexOf(UNIX_PATH_SEPARATOR, lastSeparatorIdx - 1); + return nextSeparatorIdx == -1 ? 
fullPath : fullPath.substring(nextSeparatorIdx + 1); + } + + public static String create(String fileId, Integer addressOrLine, String exeFilename, String sourceFilename, String functionName) { + if (Strings.isEmpty(functionName)) { + return Integer.toString(Objects.hash(fileId, addressOrLine)); + } + if (Strings.isEmpty(sourceFilename)) { + return Integer.toString(Objects.hash(fileId, functionName)); + } + return Integer.toString(Objects.hash(exeFilename, functionName, getBasenameAndParent(sourceFilename))); + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java index c006b52c5ed27..d357971e68b1f 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java @@ -23,6 +23,9 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXContentObject { private final int size; private final double samplingRate; + private final int selfCPU; + private final int totalCPU; + private final long totalSamples; private final List> edges; private final List fileIds; private final List frameTypes; @@ -51,6 +54,9 @@ public GetFlamegraphResponse(StreamInput in) throws IOException { this.sourceLines = in.readCollectionAsList(StreamInput::readInt); this.countInclusive = in.readCollectionAsList(StreamInput::readInt); this.countExclusive = in.readCollectionAsList(StreamInput::readInt); + this.selfCPU = in.readInt(); + this.totalCPU = in.readInt(); + this.totalSamples = in.readLong(); } public GetFlamegraphResponse( @@ -67,7 +73,10 @@ public GetFlamegraphResponse( List sourceFileNames, List sourceLines, List countInclusive, - List countExclusive + List countExclusive, + int selfCPU, + int totalCPU, + long totalSamples ) { this.size = size; this.samplingRate = samplingRate; @@ -83,6 +92,9 @@ public GetFlamegraphResponse( this.sourceLines = sourceLines; this.countInclusive = countInclusive; this.countExclusive = countExclusive; + this.selfCPU = selfCPU; + this.totalCPU = totalCPU; + this.totalSamples = totalSamples; } @Override @@ -101,6 +113,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeCollection(this.sourceLines, StreamOutput::writeInt); out.writeCollection(this.countInclusive, StreamOutput::writeInt); out.writeCollection(this.countExclusive, StreamOutput::writeInt); + out.writeInt(this.selfCPU); + out.writeInt(this.totalCPU); + out.writeLong(this.totalSamples); } public int getSize() { @@ -159,6 +174,18 @@ public List getSourceLines() { return sourceLines; } + public int getSelfCPU() { + return selfCPU; + } + + public int getTotalCPU() { + return totalCPU; + } + + public long getTotalSamples() { + return totalSamples; + } + @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat( @@ -187,6 +214,9 @@ public Iterator toXContentChunked(ToXContent.Params params ChunkedToXContentHelper.array("CountExclusive", Iterators.map(countExclusive.iterator(), e -> (b, p) -> b.value(e))), Iterators.single((b, p) -> b.field("Size", size)), Iterators.single((b, p) -> b.field("SamplingRate", samplingRate)), + Iterators.single((b, p) -> b.field("SelfCPU", selfCPU)), + Iterators.single((b, p) -> b.field("TotalCPU", totalCPU)), + Iterators.single((b, p) -> b.field("TotalSamples", totalSamples)), ChunkedToXContentHelper.endObject() 
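
The FrameGroupID.create rules above fall into three tiers: unsymbolized native frames group by file ID and address, frames without source info group by file ID and function name, and fully symbolized frames group by executable, function, and the last two path segments of the source file. A small usage sketch, assuming it runs with org.elasticsearch.xpack.profiling.FrameGroupID on the classpath (all input values are illustrative):

    import org.elasticsearch.xpack.profiling.FrameGroupID;

    // Usage sketch for FrameGroupID (illustrative inputs only).
    public class FrameGroupIDDemo {
        public static void main(String[] args) {
            // Tier 1 - no function name: group by file ID and address.
            String unsymbolized = FrameGroupID.create("FEDCBA9876543210", 177863, "", "", "");
            // Tier 2 - function name but no source file: group by file ID and function.
            String noSource = FrameGroupID.create("FEDCBA9876543210", 6694, "java", "", "Unsafe.park");
            // Tier 3 - fully symbolized: file ID and address drop out entirely, so the
            // group stays stable across rebuilds of the same executable.
            String symbolized = FrameGroupID.create("FEDCBA9876543210", 64, "main", "/usr/local/go/src/runtime/lock_futex.go", "futex_wake");
            System.out.println(unsymbolized + " " + noSource + " " + symbolized);
            // getBasenameAndParent keeps one directory of context:
            System.out.println(FrameGroupID.getBasenameAndParent("/usr/local/go/src/runtime/lock_futex.go"));
            // prints: runtime/lock_futex.go
        }
    }
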
); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index 4083776f8c4a6..3932e386225c5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -173,7 +173,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(query, sampleSize); + // The object representation of `query` may use Lucene's BytesRef to represent values. That class's hashCode implementation + // uses StringHelper.GOOD_FAST_HASH_SEED, which is reinitialized for each JVM. This means that the hash code is consistent *within* + // a JVM but will not be consistent across the cluster. As we use hashCode e.g. to initialize the random number generator in + // Resampler to produce consistent downsampling results, relying on the default hashCode implementation of `query` will + // produce consistent results per node but not across the cluster. To avoid this, we produce the hashCode based on the + // string representation instead, which will produce consistent results for the entire cluster and across node restarts. + return Objects.hash(Objects.toString(query, "null"), sampleSize); } @Override diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java index c25733bf8587c..39dd7cd611e64 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java @@ -33,6 +33,7 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX private final Map stackTraceEvents; private final int totalFrames; private final double samplingRate; + private final long totalSamples; public GetStackTracesResponse(StreamInput in) throws IOException { this.stackTraces = in.readBoolean() @@ -59,6 +60,7 @@ this.stackTraceEvents = in.readBoolean() ?
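
The comment above is the heart of the fix: BytesRef.hashCode() mixes in a per-JVM murmur seed, so hashing the query object directly yields node-local values, whereas String.hashCode() is fully specified by the JLS and therefore identical on every node and across restarts. A minimal sketch of the stable variant, with a hypothetical helper name (the real logic lives in GetStackTracesRequest.hashCode() above):

    import java.util.Objects;

    // Hypothetical helper showing why the request hashes the query's string form.
    public final class StableRequestHash {
        private StableRequestHash() {}

        public static int stableHash(Object query, int sampleSize) {
            // String.hashCode() is specified by the JLS, so every node - and every
            // restart - derives the same value from the same rendered query.
            return Objects.hash(Objects.toString(query, "null"), sampleSize);
        }
    }

The Resampler seeds its random generator from this hash, so all nodes downsample the same events for a given request.
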
in.readMap(StreamInput::readInt) : null; this.totalFrames = in.readInt(); this.samplingRate = in.readDouble(); + this.totalSamples = in.readLong(); } public GetStackTracesResponse( @@ -67,7 +69,8 @@ public GetStackTracesResponse( Map executables, Map stackTraceEvents, int totalFrames, - double samplingRate + double samplingRate, + long totalSamples ) { this.stackTraces = stackTraces; this.stackFrames = stackFrames; @@ -75,6 +78,7 @@ public GetStackTracesResponse( this.stackTraceEvents = stackTraceEvents; this.totalFrames = totalFrames; this.samplingRate = samplingRate; + this.totalSamples = totalSamples; } @Override @@ -115,6 +119,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeInt(totalFrames); out.writeDouble(samplingRate); + out.writeLong(totalSamples); } public Map getStackTraces() { @@ -141,6 +146,10 @@ public double getSamplingRate() { return samplingRate; } + public long getTotalSamples() { + return totalSamples; + } + @Override public Iterator toXContentChunked(ToXContent.Params params) { return Iterators.concat( @@ -151,7 +160,11 @@ public Iterator toXContentChunked(ToXContent.Params params optional("stack_trace_events", stackTraceEvents, ChunkedToXContentHelper::map), Iterators.single((b, p) -> b.field("total_frames", totalFrames)), Iterators.single((b, p) -> b.field("sampling_rate", samplingRate)), - // start and end are intentionally not written to the XContent representation because we only need them on the transport layer + // the following fields are intentionally not written to the XContent representation (only needed on the transport layer): + // + // * start + // * end + // * totalSamples ChunkedToXContentHelper.endObject() ); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java index 748424386457f..a09d162c32967 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/IndexStateResolver.java @@ -16,7 +16,7 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.util.List; import java.util.Map; @@ -52,7 +52,7 @@ public IndexState getIndexState(Cluster logger.trace("Index [{}] health status is RED, any pending mapping upgrades will wait until this changes", metadata.getIndex()); return new IndexState<>(index, metadata.getIndex(), IndexStatus.UNHEALTHY); } - if (checkOutdatedIndices && metadata.getCreationVersion().before(IndexVersion.V_8_9_1)) { + if (checkOutdatedIndices && metadata.getCreationVersion().before(IndexVersions.V_8_9_1)) { logger.trace( "Index [{}] has been created before version 8.9.1 and must be deleted before proceeding with the upgrade.", metadata.getIndex() diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java index 746159c23dda0..00a57faa85401 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexManager.java @@ -42,6 +42,7 @@ class 
ProfilingIndexManager extends AbstractProfilingPersistenceManager { // For testing public static final List PROFILING_INDICES = List.of( + ProfilingIndex.regular("profiling-costs", ProfilingIndexTemplateRegistry.PROFILING_COSTS_VERSION, OnVersionBump.KEEP_OLD), ProfilingIndex.regular( "profiling-returnpads-private", ProfilingIndexTemplateRegistry.PROFILING_RETURNPADS_PRIVATE_VERSION, diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index 36dc280fc1a74..571fe6bd803fc 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -55,6 +55,7 @@ public class ProfilingIndexTemplateRegistry extends IndexTemplateRegistry { public static final int PROFILING_RETURNPADS_PRIVATE_VERSION = 1; public static final int PROFILING_SQ_EXECUTABLES_VERSION = 1; public static final int PROFILING_SQ_LEAFFRAMES_VERSION = 1; + public static final int PROFILING_COSTS_VERSION = 1; public static final String PROFILING_TEMPLATE_VERSION_VARIABLE = "xpack.profiling.template.version"; @@ -232,6 +233,13 @@ protected Map getComponentTemplateConfigs() { PROFILING_TEMPLATE_VERSION_VARIABLE ), // templates for regular indices + new IndexTemplateConfig( + "profiling-costs", + "/profiling/index-template/profiling-costs.json", + INDEX_TEMPLATE_VERSION, + PROFILING_TEMPLATE_VERSION_VARIABLE, + indexVersion("costs", PROFILING_COSTS_VERSION) + ), new IndexTemplateConfig( "profiling-returnpads-private", "/profiling/index-template/profiling-returnpads-private.json", diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index d37a5be7543bd..f98c22b3bde30 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -15,30 +15,20 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import 
org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; @@ -81,24 +71,13 @@ public ProfilingPlugin(Settings settings) { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + logger.info("Profiling is {}", enabled ? "enabled" : "disabled"); - registry.set(new ProfilingIndexTemplateRegistry(settings, clusterService, threadPool, client, xContentRegistry)); + registry.set(new ProfilingIndexTemplateRegistry(settings, clusterService, threadPool, client, services.xContentRegistry())); indexStateResolver.set(new IndexStateResolver(PROFILING_CHECK_OUTDATED_INDICES.get(settings))); clusterService.getClusterSettings().addSettingsUpdateConsumer(PROFILING_CHECK_OUTDATED_INDICES, this::updateCheckOutdatedIndices); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index 97e5cdf5a74fa..f26a6b1fb3a84 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -17,11 +17,9 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -74,7 +72,11 @@ public void onFailure(Exception e) { } static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { - FlamegraphBuilder builder = new FlamegraphBuilder(response.getTotalFrames(), response.getSamplingRate()); + FlamegraphBuilder builder = new FlamegraphBuilder( + response.getTotalSamples(), + response.getTotalFrames(), + response.getSamplingRate() + ); if (response.getTotalFrames() == 0) { return builder.build(); } @@ -98,7 +100,7 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { String executable = response.getExecutables().getOrDefault(fileId, ""); for (Frame frame : stackFrame.frames()) { - String frameGroupId = createFrameGroupId(fileId, addressOrLine, executable, frame.fileName(), frame.functionName()); + String frameGroupId = FrameGroupID.create(fileId, addressOrLine, executable, frame.fileName(), frame.functionName()); int nodeId; if 
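
The new counters follow a simple invariant: totalCPU accumulates every inclusive sample that enters the graph (at node creation and on each merge), while selfCPU accumulates only exclusive samples. A self-contained toy model of that bookkeeping, assuming hypothetical names (the real logic is in FlamegraphBuilder below):

    import java.util.ArrayList;
    import java.util.List;

    // Toy model of FlamegraphBuilder's CPU counters, trimmed to the invariant.
    public class CpuCounters {
        private final List<Integer> countInclusive = new ArrayList<>();
        private final List<Integer> countExclusive = new ArrayList<>();
        private int selfCPU;
        private int totalCPU;

        public int addNode(int samples) {
            countInclusive.add(samples);
            countExclusive.add(0);
            totalCPU += samples; // inclusive samples always feed totalCPU
            return countInclusive.size() - 1;
        }

        public void addSamplesInclusive(int nodeId, int samples) {
            countInclusive.set(nodeId, countInclusive.get(nodeId) + samples);
            totalCPU += samples;
        }

        public void addSamplesExclusive(int nodeId, int samples) {
            countExclusive.set(nodeId, countExclusive.get(nodeId) + samples);
            selfCPU += samples; // only leaf ("self") samples feed selfCPU
        }

        public static void main(String[] args) {
            CpuCounters c = new CpuCounters();
            int root = c.addNode(0);
            int leaf = c.addNode(1);
            c.addSamplesInclusive(root, 1);
            c.addSamplesExclusive(leaf, 1);
            System.out.println(c.selfCPU + " / " + c.totalCPU); // prints: 1 / 2
        }
    }
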
(builder.isExists(frameGroupId)) { @@ -130,37 +132,12 @@ static GetFlamegraphResponse buildFlamegraph(GetStackTracesResponse response) { return builder.build(); } - @SuppressForbidden(reason = "Using pathSeparator constant to extract the filename with low overhead") - private static String getFilename(String fullPath) { - if (fullPath == null || fullPath.isEmpty()) { - return fullPath; - } - int lastSeparatorIdx = fullPath.lastIndexOf(File.pathSeparator); - return lastSeparatorIdx == -1 ? fullPath : fullPath.substring(lastSeparatorIdx + 1); - } - - private static String createFrameGroupId( - String fileId, - Integer addressOrLine, - String exeFilename, - String sourceFilename, - String functionName - ) { - StringBuilder sb = new StringBuilder(); - if (functionName.isEmpty()) { - sb.append(fileId); - sb.append(addressOrLine); - } else { - sb.append(exeFilename); - sb.append(functionName); - sb.append(getFilename(sourceFilename)); - } - return sb.toString(); - } - private static class FlamegraphBuilder { private int currentNode = 0; private int size = 0; + private int selfCPU; + private int totalCPU; + private final long totalSamples; // Map: FrameGroupId -> NodeId private final List> edges; private final List fileIds; @@ -176,7 +153,7 @@ private static class FlamegraphBuilder { private final List countExclusive; private final double samplingRate; - FlamegraphBuilder(int frames, double samplingRate) { + FlamegraphBuilder(long totalSamples, int frames, double samplingRate) { // as the number of frames does not account for inline frames we slightly overprovision. int capacity = (int) (frames * 1.1d); this.edges = new ArrayList<>(capacity); @@ -191,6 +168,7 @@ private static class FlamegraphBuilder { this.sourceLines = new ArrayList<>(capacity); this.countInclusive = new ArrayList<>(capacity); this.countExclusive = new ArrayList<>(capacity); + this.totalSamples = totalSamples; // always insert root node int nodeId = this.addNode("", 0, false, "", 0, "", 0, "", 0, 0, null); this.setCurrentNode(nodeId); @@ -223,6 +201,7 @@ public int addNode( this.sourceFileNames.add(sourceFileName); this.sourceLines.add(sourceLine); this.countInclusive.add(samples); + this.totalCPU += samples; this.countExclusive.add(0); if (frameGroupId != null) { this.edges.get(currentNode).put(frameGroupId, node); @@ -246,11 +225,13 @@ public int getNodeId(String frameGroupId) { public void addSamplesInclusive(int nodeId, int sampleCount) { Integer priorSampleCount = this.countInclusive.get(nodeId); this.countInclusive.set(nodeId, priorSampleCount + sampleCount); + this.totalCPU += sampleCount; } public void addSamplesExclusive(int nodeId, int sampleCount) { Integer priorSampleCount = this.countExclusive.get(nodeId); this.countExclusive.set(nodeId, priorSampleCount + sampleCount); + this.selfCPU += sampleCount; } public GetFlamegraphResponse build() { @@ -268,7 +249,10 @@ public GetFlamegraphResponse build() { sourceFileNames, sourceLines, countInclusive, - countExclusive + countExclusive, + selfCPU, + totalCPU, + totalSamples ); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java index 113b600f7702b..e15792adc489d 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetStackTracesAction.java @@ 
-79,11 +79,13 @@ public class TransportGetStackTracesAction extends HandledTransportAction * elfInfoCacheTTL for executables (default: 6 hours) and traceExpirationTimeout for stack + * traces (default: 3 hours). */ public static final Setting PROFILING_KV_INDEX_OVERLAP = Setting.positiveTimeSetting( "xpack.profiling.kv_index.overlap", - TimeValue.timeValueHours(3), + TimeValue.timeValueHours(6), Setting.Property.NodeScope ); @@ -206,6 +208,7 @@ private void searchEventGroupByStackTrace( stackTraceEvents.put(bucket.getKeyAsString(), finalCount); } } + responseBuilder.setTotalSamples(totalFinalCount); log.debug( "Found [{}] stacktrace events, resampled with sample rate [{}] to [{}] events ([{}] unique stack traces).", totalCount, @@ -500,6 +503,7 @@ private static class GetStackTracesResponseBuilder { private Map executables; private Map stackTraceEvents; private double samplingRate; + private long totalSamples; public void setStackTraces(Map stackTraces) { this.stackTraces = stackTraces; @@ -545,8 +549,20 @@ public void setSampleRate(double rate) { this.samplingRate = rate; } + public void setTotalSamples(long totalSamples) { + this.totalSamples = totalSamples; + } + public GetStackTracesResponse build() { - return new GetStackTracesResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, samplingRate); + return new GetStackTracesResponse( + stackTraces, + stackFrames, + executables, + stackTraceEvents, + totalFrames, + samplingRate, + totalSamples + ); } } } diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java new file mode 100644 index 0000000000000..50cfdd28a98fc --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/FrameGroupIDTests.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.profiling; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Objects; + +public class FrameGroupIDTests extends ESTestCase { + public void testEmptySourceFilename() { + String given = FrameGroupID.getBasenameAndParent(""); + assertEquals("", given); + } + + public void testNonPathSourceFilename() { + String given = FrameGroupID.getBasenameAndParent("void jdk.internal.misc.Unsafe.park(boolean, long)"); + assertEquals("void jdk.internal.misc.Unsafe.park(boolean, long)", given); + } + + public void testRootSourceFilename() { + String given = FrameGroupID.getBasenameAndParent("/"); + assertEquals("/", given); + } + + public void testRelativePathSourceFilename() { + String given = FrameGroupID.getBasenameAndParent("../src/main.c"); + assertEquals("src/main.c", given); + } + + public void testAbsolutePathSourceFilename() { + String given = FrameGroupID.getBasenameAndParent("/usr/local/go/src/runtime/lock_futex.go"); + assertEquals("runtime/lock_futex.go", given); + } + + public void testEmptyFunctionName() { + String expected = Integer.toString(Objects.hash("FEDCBA9876543210", 177863)); + String given = FrameGroupID.create("FEDCBA9876543210", 177863, "", "", ""); + assertEquals(expected, given); + } + + public void testFunctionNameAndEmptySourceFilename() { + String expected = Integer.toString(Objects.hash("FEDCBA9876543210", "void jdk.internal.misc.Unsafe.park(boolean, long)")); + String given = FrameGroupID.create("FEDCBA9876543210", 6694, "
    ", "", "void jdk.internal.misc.Unsafe.park(boolean, long)"); + assertEquals(expected, given); + } + + public void testFunctionNameAndSourceFilenameWithAbsolutePath() { + String expected = Integer.toString( + Objects.hash("main", "futex_wake", FrameGroupID.getBasenameAndParent("/usr/local/go/src/runtime/lock_futex.go")) + ); + String given = FrameGroupID.create("FEDCBA9876543210", 64, "main", "/usr/local/go/src/runtime/lock_futex.go", "futex_wake"); + assertEquals(expected, given); + } + + public void testFunctionNameAndSourceFilenameWithoutAbsolutePath() { + String expected = Integer.toString( + Objects.hash("
    ", "void jdk.internal.misc.Unsafe.park(boolean, long)", FrameGroupID.getBasenameAndParent("bootstrap.java")) + ); + String given = FrameGroupID.create( + "FEDCBA9876543210", + 29338, + "
    ", + "bootstrap.java", + "void jdk.internal.misc.Unsafe.park(boolean, long)" + ); + assertEquals(expected, given); + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java index c468d88cec4a7..566c4d24fc088 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java @@ -42,9 +42,10 @@ protected GetStackTracesResponse createTestInstance() { ) ); Map executables = randomNullable(Map.of("QCCDqjSg3bMK1C4YRK6Tiw", "libc.so.6")); - Map stackTraceEvents = randomNullable(Map.of(randomAlphaOfLength(12), randomIntBetween(1, 200))); + int totalSamples = randomIntBetween(1, 200); + Map stackTraceEvents = randomNullable(Map.of(randomAlphaOfLength(12), totalSamples)); - return new GetStackTracesResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, 1.0); + return new GetStackTracesResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, 1.0, totalSamples); } @Override diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java index 0b762b5eb45da..e23d003e2f209 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingDataStreamManagerTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ClusterServiceUtils; @@ -191,7 +192,7 @@ public void testThatOutdatedDataStreamIsDetectedIfCheckEnabled() throws Exceptio nodes, IndexMetadata.State.OPEN, // This is an outdated version that requires indices to be deleted upon migration - IndexVersion.V_8_8_2, + IndexVersions.V_8_8_2, true ); @@ -223,7 +224,7 @@ public void testThatOutdatedDataStreamIsIgnoredIfCheckDisabled() throws Exceptio List.of(existingDataStream), nodes, IndexMetadata.State.OPEN, - IndexVersion.V_8_8_2, + IndexVersions.V_8_8_2, true ); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java index 3efd1d4c041f5..923269646d4d1 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ProfilingIndexManagerTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ClusterServiceUtils; @@ -191,7 +192,7 @@ public void testThatOutdatedIndexIsDetectedIfCheckEnabled() throws Exception { nodes, 
IndexMetadata.State.OPEN, // This is an outdated version that requires indices to be deleted upon migration - IndexVersion.V_8_8_2, + IndexVersions.V_8_8_2, true ); @@ -223,7 +224,7 @@ public void testThatOutdatedIndexIsIgnoredIfCheckDisabled() throws Exception { List.of(existingIndex), nodes, IndexMetadata.State.OPEN, - IndexVersion.V_8_8_2, + IndexVersions.V_8_8_2, true ); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java index 79585986c64e2..07d9b60b31ff7 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/ResamplerTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.profiling; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.test.ESTestCase; import java.util.random.RandomGenerator; @@ -67,6 +69,27 @@ public void testResamplingNoSampleRateAdjustment() { assertEquals(20_000, resampler.adjustSampleCount(actualSamplesSingleTrace)); } + public void testResamplingNoSampleRateAdjustmentWithQuery() { + double sampleRate = 1.0d; + int requestedSamples = 1; + int actualTotalSamples = 200; + // there is only one event + int actualSamplesSingleTrace = 200; + + GetStackTracesRequest request = new GetStackTracesRequest( + requestedSamples, + new BoolQueryBuilder().filter( + new RangeQueryBuilder("@timestamp").lt("2023-10-19 15:33:00").gte("2023-10-19 15:31:52").format("yyyy-MM-dd HH:mm:ss") + ) + ); + + request.setAdjustSampleCount(false); + // use the real resampler here to ensure we have a stable seed even for complex queries + Resampler resampler = new Resampler(request, sampleRate, actualTotalSamples); + + assertEquals(200, resampler.adjustSampleCount(actualSamplesSingleTrace)); + } + public void testResamplingAndSampleRateAdjustment() { // corresponds to profiling-events-5pow01 double sampleRate = 1.0d / Math.pow(5.0d, 1); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java index 59eb6889f02a1..4b6aef4f544f9 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/RestGetStackTracesActionTests.java @@ -48,7 +48,8 @@ public void testPrepareEmptyRequest() { Collections.emptyMap(), Collections.emptyMap(), 0, - 1.0 + 1.0, + 0 ); }); RestRequest profilingRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) @@ -73,7 +74,8 @@ public void testPrepareParameterizedRequest() { Collections.emptyMap(), Collections.emptyMap(), 0, - 0.0 + 0.0, + 0 ); }); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST) diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java index d931e4e17d5b4..7b3a572c918de 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java +++ 
b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphActionTests.java @@ -48,7 +48,8 @@ public void testCreateFlamegraph() { Map.of("fr28zxcZ2UDasxYuu6dV-w", "containerd"), Map.of("2buqP1GpF-TXYmL4USW8gA", 1), 9, - 1.0d + 1.0d, + 1 ); GetFlamegraphResponse response = TransportGetFlamegraphAction.buildFlamegraph(stacktraces); assertNotNull(response); @@ -58,15 +59,15 @@ public void testCreateFlamegraph() { assertEquals(List.of(0, 0, 0, 0, 0, 0, 0, 0, 0, 1), response.getCountExclusive()); assertEquals( List.of( - Map.of("fr28zxcZ2UDasxYuu6dV-w12784352", 1), - Map.of("fr28zxcZ2UDasxYuu6dV-w19334053", 2), - Map.of("fr28zxcZ2UDasxYuu6dV-w19336161", 3), - Map.of("fr28zxcZ2UDasxYuu6dV-w18795859", 4), - Map.of("fr28zxcZ2UDasxYuu6dV-w18622708", 5), - Map.of("fr28zxcZ2UDasxYuu6dV-w18619213", 6), - Map.of("fr28zxcZ2UDasxYuu6dV-w12989721", 7), - Map.of("fr28zxcZ2UDasxYuu6dV-w13658842", 8), - Map.of("fr28zxcZ2UDasxYuu6dV-w16339645", 9), + Map.of("174640828", 1), + Map.of("181190529", 2), + Map.of("181192637", 3), + Map.of("180652335", 4), + Map.of("180479184", 5), + Map.of("180475689", 6), + Map.of("174846197", 7), + Map.of("175515318", 8), + Map.of("178196121", 9), Map.of() ), response.getEdges() @@ -111,10 +112,14 @@ public void testCreateFlamegraph() { assertEquals(List.of(0, 0, 0, 0, 0, 0, 0, 0, 0, 0), response.getFunctionOffsets()); assertEquals(List.of("", "", "", "", "", "", "", "", "", ""), response.getSourceFileNames()); assertEquals(List.of(0, 0, 0, 0, 0, 0, 0, 0, 0, 0), response.getSourceLines()); + assertEquals(1, response.getSelfCPU()); + assertEquals(10, response.getTotalCPU()); + assertEquals(1L, response.getTotalSamples()); + } public void testCreateEmptyFlamegraphWithRootNode() { - GetStackTracesResponse stacktraces = new GetStackTracesResponse(Map.of(), Map.of(), Map.of(), Map.of(), 0, 1.0d); + GetStackTracesResponse stacktraces = new GetStackTracesResponse(Map.of(), Map.of(), Map.of(), Map.of(), 0, 1.0d, 0); GetFlamegraphResponse response = TransportGetFlamegraphAction.buildFlamegraph(stacktraces); assertNotNull(response); assertEquals(1, response.getSize()); @@ -131,5 +136,8 @@ public void testCreateEmptyFlamegraphWithRootNode() { assertEquals(List.of(0), response.getFunctionOffsets()); assertEquals(List.of(""), response.getSourceFileNames()); assertEquals(List.of(0), response.getSourceLines()); + assertEquals(0, response.getSelfCPU()); + assertEquals(0, response.getTotalCPU()); + assertEquals(0L, response.getTotalSamples()); } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RegexMatch.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RegexMatch.java index b7f50100c60db..439bba43b1348 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RegexMatch.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RegexMatch.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Nullability; import org.elasticsearch.xpack.ql.expression.function.scalar.UnaryScalarFunction; @@ -69,6 +70,9 @@ public boolean foldable() { @Override public Boolean fold() { Object val = field().fold(); + if (val instanceof BytesRef br) { + val = br.utf8ToString(); + } return RegexProcessor.RegexOperation.match(val, 
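
Constant folding can surface keyword values as Lucene BytesRef rather than String, and Java regex matching needs the decoded text. A standalone sketch of the same guard (only org.apache.lucene.util.BytesRef is assumed; the demo class is hypothetical):

    import java.util.regex.Pattern;

    import org.apache.lucene.util.BytesRef;

    // Sketch: decode BytesRef-backed values before regex matching.
    public final class FoldedRegexDemo {
        public static boolean matches(Object val, String javaRegex) {
            if (val instanceof BytesRef br) {
                val = br.utf8ToString(); // BytesRef.toString() prints hex bytes, not the text
            }
            return val != null && Pattern.matches(javaRegex, val.toString());
        }

        public static void main(String[] args) {
            BytesRef ref = new BytesRef("foobar");
            System.out.println(matches(ref, "foo.*")); // true; without the guard it would be false
        }
    }
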
pattern().asJavaRegex()); } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java index b9fa092868592..4a31309ac8f2f 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/plan/logical/EsRelation.java @@ -34,7 +34,7 @@ public EsRelation(Source source, EsIndex index, List attributes) { this(source, index, attributes, false); } - private EsRelation(Source source, EsIndex index, List attributes, boolean frozen) { + public EsRelation(Source source, EsIndex index, List attributes, boolean frozen) { super(source); this.index = index; this.attrs = attributes; @@ -43,7 +43,7 @@ private EsRelation(Source source, EsIndex index, List attributes, boo @Override protected NodeInfo info() { - return NodeInfo.create(this, EsRelation::new, index, frozen); + return NodeInfo.create(this, EsRelation::new, index, attrs, frozen); } private static List flatten(Source source, Map mapping) { diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/LoggingUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/LoggingUtils.java new file mode 100644 index 0000000000000..12102e350d23e --- /dev/null +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/LoggingUtils.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ql.util; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.logging.Level; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.rest.RestStatus; + +public final class LoggingUtils { + + private LoggingUtils() {} + + public static void logOnFailure(Logger logger, Throwable throwable) { + RestStatus status = ExceptionsHelper.status(throwable); + logger.log(status.getStatus() >= 500 ? Level.WARN : Level.DEBUG, () -> "Request failed with status [" + status + "]: ", throwable); + } + +} diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java index db15bfa3c57d5..beebf0d581444 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/StringUtils.java @@ -139,7 +139,8 @@ public static String likeToJavaPattern(String pattern, char escape) { // * -> .* // ? -> . // escape character - can be 0 (in which case no regex gets escaped) or - // should be followed by % or _ (otherwise an exception is thrown) + // should be followed by * or ? or the escape character itself (otherwise an exception is thrown). + // Using * or ? as escape characters should be avoided because it will make it impossible to enter them as literals public static String wildcardToJavaPattern(String pattern, char escape) { StringBuilder regex = new StringBuilder(pattern.length() + 4); @@ -157,7 +158,7 @@ public static String wildcardToJavaPattern(String pattern, char escape) { case '*' -> regex.append(escaped ? "\\*" : ".*"); case '?' -> regex.append(escaped ? "\\?" 
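
The contract documented above now lets the escape character escape itself. A short usage sketch against StringUtils.wildcardToJavaPattern, assuming it is on the classpath, which matches the new testEscapedEscape expectation:

    import static org.elasticsearch.xpack.ql.util.StringUtils.wildcardToJavaPattern;

    // Usage sketch for the relaxed escape handling.
    public class WildcardEscapeDemo {
        public static void main(String[] args) {
            // Each escaped escape character maps to one literal backslash in the regex.
            System.out.println(wildcardToJavaPattern("\\\\\\\\", '\\')); // prints: ^\\\\$
            // Escaped wildcards are still literals, unescaped ones still translate.
            System.out.println(wildcardToJavaPattern("a\\*b*", '\\')); // prints: ^a\*b.*$
        }
    }
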
: "."); default -> { - if (escaped) { + if (escaped && escape != curr) { throw new QlIllegalArgumentException( "Invalid sequence - escape character is not followed by special wildcard char" ); diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/StringUtilsTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/StringUtilsTests.java index ca163cff7b36e..3280a96d246a9 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/StringUtilsTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/util/StringUtilsTests.java @@ -52,4 +52,7 @@ public void testWildcard() { assertEquals("^foo\\*$", wildcardToJavaPattern("foox*", 'x')); } + public void testEscapedEscape() { + assertEquals("^\\\\\\\\$", wildcardToJavaPattern("\\\\\\\\", '\\')); + } } diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNode.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNode.java index 509e09415ec0d..0a39a25dd8b32 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNode.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNode.java @@ -9,10 +9,9 @@ import org.apache.http.HttpHost; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; import org.elasticsearch.core.Nullable; -public record TestNode(String id, Version version, @Nullable TransportVersion transportVersion, HttpHost publishAddress) { +public record TestNode(String id, String version, @Nullable TransportVersion transportVersion, HttpHost publishAddress) { @Override public String toString() { return "Node{" + "id='" + id + '\'' + ", version=" + version + '}'; diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java index af853670a8eb9..25368fa4e15bd 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestNodes.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.ql; import org.elasticsearch.TransportVersion; -import org.elasticsearch.Version; import java.util.Comparator; import java.util.HashMap; @@ -18,25 +17,22 @@ public final class TestNodes extends HashMap { + private final String bwcNodesVersion; + + TestNodes(String bwcNodesVersion) { + this.bwcNodesVersion = bwcNodesVersion; + } + public void add(TestNode node) { put(node.id(), node); } public List getNewNodes() { - Version bwcVersion = getBWCVersion(); - return values().stream().filter(n -> n.version().after(bwcVersion)).collect(Collectors.toList()); + return values().stream().filter(n -> n.version().equals(bwcNodesVersion) == false).collect(Collectors.toList()); } public List getBWCNodes() { - Version bwcVersion = getBWCVersion(); - return values().stream().filter(n -> n.version().equals(bwcVersion)).collect(Collectors.toList()); - } - - public Version getBWCVersion() { - if (isEmpty()) { - throw new IllegalStateException("no nodes available"); - } - return values().stream().map(TestNode::version).min(Comparator.naturalOrder()).get(); + return values().stream().filter(n -> n.version().equals(bwcNodesVersion)).collect(Collectors.toList()); } public TransportVersion getBWCTransportVersion() { diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java 
b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java index 8fc0963a8f210..a395ac7766b0a 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java @@ -295,22 +295,27 @@ public static Tuple pathAndName(String string) { return new Tuple<>(folder, file); } - public static TestNodes buildNodeAndVersions(RestClient client) throws IOException { + public static TestNodes buildNodeAndVersions(RestClient client, String bwcNodesVersion) throws IOException { Response response = client.performRequest(new Request("GET", "_nodes")); ObjectPath objectPath = ObjectPath.createFromResponse(response); Map nodesAsMap = objectPath.evaluate("nodes"); - TestNodes nodes = new TestNodes(); + TestNodes nodes = new TestNodes(bwcNodesVersion); for (String id : nodesAsMap.keySet()) { - Version nodeVersion = Version.fromString(objectPath.evaluate("nodes." + id + ".version")); + String nodeVersion = objectPath.evaluate("nodes." + id + ".version"); Object tvField; TransportVersion transportVersion = null; - if (nodeVersion.before(Version.V_8_8_0)) { - transportVersion = TransportVersion.fromId(nodeVersion.id); // no transport_version field - } else if ((tvField = objectPath.evaluate("nodes." + id + ".transport_version")) != null) { + if ((tvField = objectPath.evaluate("nodes." + id + ".transport_version")) != null) { // this json might be from a node <8.8.0, but about a node >=8.8.0 // in which case the transport_version field won't exist. Just ignore it for now. transportVersion = TransportVersion.fromString(tvField.toString()); + } else { // no transport_version field + // this json might be from a node <8.8.0, but about a node >=8.8.0 + // In that case the transport_version field won't exist. Just ignore it for now. 
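
The BWC filtering in these fixtures now keys off a version string supplied by the test runner rather than a computed minimum Version. A usage sketch under that assumption (the client wiring and the "8.10.0" literal are placeholders):

    import java.io.IOException;

    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.xpack.ql.TestNodes;
    import org.elasticsearch.xpack.ql.TestUtils;

    // Usage sketch: partition a mixed-version cluster by the externally supplied BWC version.
    public class BwcNodesDemo {
        static void printPartition(RestClient client) throws IOException {
            TestNodes nodes = TestUtils.buildNodeAndVersions(client, "8.10.0"); // placeholder version
            // Nodes whose version string equals the BWC version form the "old" side...
            int bwc = nodes.getBWCNodes().size();
            // ...and every other node counts as upgraded.
            int upgraded = nodes.getNewNodes().size();
            System.out.println(bwc + " BWC nodes, " + upgraded + " upgraded nodes");
        }
    }
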
+ Version version = Version.fromString(nodeVersion); + if (version.before(Version.V_8_8_0)) { + transportVersion = TransportVersion.fromId(version.id); + } } nodes.add( diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java index 640d4ca2f9392..c5fac43723b70 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankCoordinatorCanMatchIT.java @@ -130,8 +130,7 @@ public void testCanMatchCoordinator() throws Exception { }); // match 2 separate shard with no overlap in queries - SearchResponse response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -150,8 +149,7 @@ public void testCanMatchCoordinator() throws Exception { assertEquals(3, response.getSkippedShards()); // match 2 shards with overlap in queries - response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -170,8 +168,7 @@ public void testCanMatchCoordinator() throws Exception { assertEquals(3, response.getSkippedShards()); // match one shard with one query in range and one query out of range - response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -190,8 +187,7 @@ public void testCanMatchCoordinator() throws Exception { assertEquals(4, response.getSkippedShards()); // match no shards, but still use one to generate a search response - response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -210,8 +206,7 @@ public void testCanMatchCoordinator() throws Exception { assertEquals(4, response.getSkippedShards()); // match one shard with with no overlap in queries - response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -230,8 +225,7 @@ public void testCanMatchCoordinator() throws Exception { assertEquals(4, response.getSkippedShards()); // match one shard with exact overlap in queries - response = client().prepareSearch("time_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("time_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java 
b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index 63afb47273cb1..3db050e071aa7 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -139,8 +139,7 @@ public void setupSuiteScopeCluster() throws Exception { public void testTotalDocsSmallerThanSize() { float[] queryVector = { 0.0f }; KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector", queryVector, 3, 3, null); - SearchResponse response = client().prepareSearch("tiny_index") - .setRankBuilder(new RRFRankBuilder(100, 1)) + SearchResponse response = prepareSearch("tiny_index").setRankBuilder(new RRFRankBuilder(100, 1)) .setKnnSearch(List.of(knnSearch)) .setQuery(QueryBuilders.termQuery("text", "term")) .addFetchField("vector") @@ -170,8 +169,7 @@ public void testTotalDocsSmallerThanSize() { public void testBM25AndKnn() { float[] queryVector = { 500.0f }; KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearch)) .setQuery( @@ -211,8 +209,7 @@ public void testMultipleOnlyKnn() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", queryVectorAsc, 51, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 51, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(51, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(51, 1)) .setTrackTotalHits(true) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .addFetchField("vector_asc") @@ -262,8 +259,7 @@ public void testBM25AndMultipleKnn() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", queryVectorAsc, 51, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 51, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(51, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(51, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .setQuery( @@ -332,8 +328,7 @@ public void testBM25AndMultipleKnn() { public void testBM25AndKnnWithBucketAggregation() { float[] queryVector = { 500.0f }; KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(true) .setKnnSearch(List.of(knnSearch)) .setQuery( @@ -389,8 +384,7 @@ public void testMultipleOnlyKnnWithAggregation() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", queryVectorAsc, 51, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 51, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - 
.setRankBuilder(new RRFRankBuilder(51, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(51, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .addFetchField("vector_asc") @@ -456,8 +450,7 @@ public void testBM25AndMultipleKnnWithAggregation() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", queryVectorAsc, 51, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 51, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(51, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(51, 1)) .setTrackTotalHits(true) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .setQuery( @@ -542,8 +535,7 @@ public void testBM25AndMultipleKnnWithAggregation() { public void testMultiBM25() { for (SearchType searchType : SearchType.CURRENTLY_SUPPORTED) { - SearchResponse response = client().prepareSearch("nrd_index") - .setSearchType(searchType) + SearchResponse response = prepareSearch("nrd_index").setSearchType(searchType) .setRankBuilder(new RRFRankBuilder(8, 1)) .setTrackTotalHits(false) .setSubSearches( @@ -612,8 +604,7 @@ public void testMultiBM25() { public void testMultiBM25WithAggregation() { for (SearchType searchType : SearchType.CURRENTLY_SUPPORTED) { - SearchResponse response = client().prepareSearch("nrd_index") - .setSearchType(searchType) + SearchResponse response = prepareSearch("nrd_index").setSearchType(searchType) .setRankBuilder(new RRFRankBuilder(8, 1)) .setTrackTotalHits(false) .setSubSearches( @@ -699,8 +690,7 @@ public void testMultiBM25WithAggregation() { public void testMultiBM25AndSingleKnn() { float[] queryVector = { 500.0f }; KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearch)) .setSubSearches( @@ -756,8 +746,7 @@ public void testMultiBM25AndSingleKnn() { public void testMultiBM25AndSingleKnnWithAggregation() { float[] queryVector = { 500.0f }; KnnSearchBuilder knnSearch = new KnnSearchBuilder("vector_asc", queryVector, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearch)) .setSubSearches( @@ -831,8 +820,7 @@ public void testMultiBM25AndMultipleKnn() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", queryVectorAsc, 101, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .setSubSearches( @@ -892,8 +880,7 @@ public void testMultiBM25AndMultipleKnnWithAggregation() { float[] queryVectorDesc = { 500.0f }; KnnSearchBuilder knnSearchAsc = new KnnSearchBuilder("vector_asc", 
queryVectorAsc, 101, 1001, null); KnnSearchBuilder knnSearchDesc = new KnnSearchBuilder("vector_desc", queryVectorDesc, 101, 1001, null); - SearchResponse response = client().prepareSearch("nrd_index") - .setRankBuilder(new RRFRankBuilder(101, 1)) + SearchResponse response = prepareSearch("nrd_index").setRankBuilder(new RRFRankBuilder(101, 1)) .setTrackTotalHits(false) .setKnnSearch(List.of(knnSearchAsc, knnSearchDesc)) .setSubSearches( diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java index a7019d76cdcd1..0e51958ea164e 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankShardCanMatchIT.java @@ -144,8 +144,7 @@ public void testCanMatchShard() throws IOException { indicesAdmin().prepareRefresh("value_index").get(); // match 2 separate shards with no overlap in queries - SearchResponse response = client().prepareSearch("value_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + SearchResponse response = prepareSearch("value_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -164,8 +163,7 @@ public void testCanMatchShard() throws IOException { assertEquals(3, response.getSkippedShards()); // match one shard with one query and do not match the other shard with one query - response = client().prepareSearch("value_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("value_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -184,8 +182,7 @@ public void testCanMatchShard() throws IOException { assertEquals(4, response.getSkippedShards()); // match no shards, but still use one to generate a search response - response = client().prepareSearch("value_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("value_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -204,8 +201,7 @@ public void testCanMatchShard() throws IOException { assertEquals(4, response.getSkippedShards()); // match the same shard for both queries - response = client().prepareSearch("value_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("value_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) @@ -224,8 +220,7 @@ public void testCanMatchShard() throws IOException { assertEquals(4, response.getSkippedShards()); // match one shard with the exact same query - response = client().prepareSearch("value_index") - .setSearchType(SearchType.QUERY_THEN_FETCH) + response = prepareSearch("value_index").setSearchType(SearchType.QUERY_THEN_FETCH) .setPreFilterShardSize(1) .setRankBuilder(new RRFRankBuilder(20, 1)) .setTrackTotalHits(false) diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java
index 571f75a3ca648..9012946abc686 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -113,8 +113,7 @@ public void testPinnedPromotions() throws Exception { private void assertPinnedPromotions(PinnedQueryBuilder pqb, LinkedHashSet<String> pins, int iter, int numRelevantDocs) { int from = randomIntBetween(0, numRelevantDocs); int size = randomIntBetween(10, 100); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(pqb) + SearchResponse searchResponse = prepareSearch().setQuery(pqb) .setTrackTotalHits(true) .setSize(size) .setFrom(from) @@ -194,11 +193,7 @@ public void testExhaustiveScoring() throws Exception { } private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { - SearchResponse searchResponse = client().prepareSearch() - .setQuery(pqb) - .setTrackTotalHits(true) - .setSearchType(DFS_QUERY_THEN_FETCH) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH).get(); long numHits = searchResponse.getHits().getTotalHits().value; assertThat(numHits, equalTo(2L)); @@ -232,11 +227,7 @@ public void testExplain() throws Exception { } private void assertExplain(PinnedQueryBuilder pqb) { - SearchResponse searchResponse = client().prepareSearch() - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(pqb) - .setExplain(true) - .get(); + SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(pqb).setExplain(true).get(); assertHitCount(searchResponse, 3); assertFirstHit(searchResponse, hasId("2")); assertSecondHit(searchResponse, hasId("1")); @@ -280,8 +271,7 @@ private void assertHighlight(PinnedQueryBuilder pqb) { HighlightBuilder testHighlighter = new HighlightBuilder(); testHighlighter.field("field1"); - SearchResponse searchResponse = client().prepareSearch() - .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) + SearchResponse searchResponse = prepareSearch().setSearchType(SearchType.DFS_QUERY_THEN_FETCH) .setQuery(pqb) .highlighter(testHighlighter) .setExplain(true) @@ -340,11 +330,7 @@ public void testMultiIndexDocs() throws Exception { new Item("test1", "b") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(pqb) - .setTrackTotalHits(true) - .setSearchType(DFS_QUERY_THEN_FETCH) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH).get(); assertHitCount(searchResponse, 4); assertFirstHit(searchResponse, both(hasIndex("test2")).and(hasId("a"))); @@ -384,11 +370,7 @@ public void testMultiIndexWithAliases() throws Exception { new Item("test", "a") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(pqb) - .setTrackTotalHits(true) - .setSearchType(DFS_QUERY_THEN_FETCH) - .get(); + SearchResponse searchResponse = prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH).get(); assertHitCount(searchResponse, 3); assertFirstHit(searchResponse, both(hasIndex("test")).and(hasId("b"))); @@ -446,11 +428,7 @@ public void testMultiIndexWithAliasesAndDuplicateIds() throws Exception { new Item("test-alias", "a") ); - SearchResponse searchResponse = client().prepareSearch() - .setQuery(pqb) - .setTrackTotalHits(true) - .setSearchType(DFS_QUERY_THEN_FETCH) - .get(); +
SearchResponse searchResponse = prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH).get(); assertHitCount(searchResponse, 4); assertFirstHit(searchResponse, both(hasIndex("test1")).and(hasId("b"))); diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle b/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle index b8c0127d0586a..595d05e05b410 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/rest/build.gradle @@ -1,3 +1,5 @@ +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' @@ -15,12 +17,12 @@ restResources { } tasks.withType(Test).configureEach { - systemProperty 'tests.path.repo', repoDir + nonInputProperties.systemProperty 'tests.path.repo', repoDir } testClusters.configureEach { testDistribution = 'DEFAULT' - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' diff --git a/x-pack/plugin/searchable-snapshots/qa/url/build.gradle b/x-pack/plugin/searchable-snapshots/qa/url/build.gradle index 160eb8d28cbcc..12fc0873958e1 100644 --- a/x-pack/plugin/searchable-snapshots/qa/url/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/url/build.gradle @@ -29,13 +29,13 @@ File repositoryDir = fixture.fsRepositoryDir as File tasks.named("javaRestTest").configure { dependsOn fixture.getTasks().named("postProcessFixture") - systemProperty 'test.url.fs.repo.dir', repositoryDir.absolutePath + nonInputProperties.systemProperty 'test.url.fs.repo.dir', repositoryDir.absolutePath nonInputProperties.systemProperty 'test.url.http', "${-> fixtureAddress('nginx-fixture')}" } testClusters.matching { it.name == "javaRestTest" }.configureEach { testDistribution = 'DEFAULT' - setting 'path.repo', repositoryDir.absolutePath + setting 'path.repo', repositoryDir.absolutePath, IGNORE_VALUE setting 'repositories.url.allowed_urls', { "${-> fixtureAddress('nginx-fixture')}" }, IGNORE_VALUE setting 'xpack.license.self_generated.type', 'trial' diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java index 71c3253b0607b..b07d307f105c0 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/BaseSearchableSnapshotsIntegTestCase.java @@ -289,15 +289,10 @@ protected void assertTotalHits(String indexName, TotalHits originalAllHits, Tota } catch (InterruptedException e) { throw new RuntimeException(e); } - allHits.set(t, client().prepareSearch(indexName).setTrackTotalHits(true).get().getHits().getTotalHits()); + allHits.set(t, prepareSearch(indexName).setTrackTotalHits(true).get().getHits().getTotalHits()); barHits.set( t, - client().prepareSearch(indexName) - .setTrackTotalHits(true) - .setQuery(matchQuery("foo", "bar")) - .get() - .getHits() - .getTotalHits() + 
prepareSearch(indexName).setTrackTotalHits(true).setQuery(matchQuery("foo", "bar")).get().getHits().getTotalHits() ); }); threads[i].start(); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java index a6ef720e90777..ee1ce56528361 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java @@ -70,7 +70,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.xpack.searchablesnapshots.SearchableSnapshots.SNAPSHOT_RECOVERY_STATE_FACTORY_KEY; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; @@ -427,7 +427,6 @@ public void testRequestCacheOnFrozen() throws Exception { indicesAdmin().prepareCreate("test-index") .setMapping("f", "type=date") .setSettings(indexSettings(1, 0).put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) - .get() ); indexRandom( true, @@ -470,7 +469,7 @@ public void testRequestCacheOnFrozen() throws Exception { dateHistogram("histo").field("f").timeZone(ZoneId.of("+01:00")).minDocCount(0).calendarInterval(DateHistogramInterval.MONTH) ) .get(); - assertSearchResponse(r1); + assertNoFailures(r1); assertRequestCacheState(client(), "test-index", 0, 1); @@ -491,7 +490,7 @@ public void testRequestCacheOnFrozen() throws Exception { .calendarInterval(DateHistogramInterval.MONTH) ) .get(); - assertSearchResponse(r2); + assertNoFailures(r2); assertRequestCacheState(client(), "test-index", i + 1, 1); Histogram h1 = r1.getAggregations().get("histo"); Histogram h2 = r2.getAggregations().get("histo"); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java index 0fda9e4e66f34..39e476107a0d6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java @@ -144,8 +144,7 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(OpenPointInTimeAction.INSTANCE, openRequest).actionGet().getPointInTimeId(); try { - SearchResponse resp = client().prepareSearch() - .setIndices(indexName) + SearchResponse resp = prepareSearch().setIndices(indexName) .setPreference(null) .setPointInTime(new PointInTimeBuilder(pitId)) .get(); @@ -158,8 +157,7 @@ public void 
testRetryPointInTime() throws Exception { internalCluster().restartNode(allocatedNode); } ensureGreen(indexName); - resp = client().prepareSearch() - .setIndices(indexName) + resp = prepareSearch().setIndices(indexName) .setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) .setPreference(null) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 8e414002bfa60..81ea36e88628c 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -478,7 +478,7 @@ public void testMaxRestoreBytesPerSecIsUsed() throws Exception { assertThat(restore.getRestoreInfo().failedShards(), equalTo(0)); ensureGreen(restoredIndexName); - assertHitCount(client().prepareSearch(restoredIndexName).setSize(0), nbDocs); + assertHitCount(prepareSearch(restoredIndexName).setSize(0), nbDocs); final Index restoredIndex = resolveIndex(restoredIndexName); for (String node : internalCluster().getNodeNames()) { diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 1e2d288af2fa7..6f71f7c33bf06 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -87,7 +87,7 @@ public void testSearchableSnapshotRelocationDoNotUseSnapshotBasedRecoveries() th ensureGreen(restoredIndexName); - assertHitCount(client().prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); mockAppender.assertAllExpectationsMatched(); Loggers.removeAppender(logger, mockAppender); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index d3cdfce0b8175..cb6cf45b641c6 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -68,7 +68,7 @@ public void testRepositoryUsedBySearchableSnapshotCanBeUpdatedButNotUnregistered Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? 
"fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(client().prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); mountedIndices[i] = restoredIndexName; } @@ -182,7 +182,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(client().prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -207,7 +207,7 @@ public void testMountIndexWithDifferentDeletionOfSnapshot() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(client().prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -237,7 +237,7 @@ public void testDeletionOfSnapshotSettingCannotBeUpdated() throws Exception { ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(client().prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java index 7a4a92ac600ac..9847eb101531c 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheIntegTests.java @@ -225,7 +225,7 @@ public void testBlobStoreCache() throws Exception { final long numberOfCacheWrites = indexingStats != null ? 
indexingStats.getTotal().getIndexCount() : 0L; logger.info("--> verifying number of documents in index [{}]", restoredIndex); - assertHitCount(client().prepareSearch(restoredIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); + assertHitCount(prepareSearch(restoredIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); for (IndicesService indicesService : internalCluster().getDataNodeInstances(IndicesService.class)) { for (IndexService indexService : indicesService) { @@ -266,7 +266,7 @@ public void testBlobStoreCache() throws Exception { checkNoBlobStoreAccess(); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); - assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); + assertHitCount(prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); logger.info("--> verifying that no extra cached blobs were indexed [{}]", SNAPSHOT_BLOB_CACHE_INDEX); refreshSystemIndex(); @@ -322,7 +322,7 @@ public Settings onNodeStopped(String nodeName) throws Exception { assertThat(indexingStats != null ? indexingStats.getTotal().getIndexCount() : 0L, equalTo(0L)); logger.info("--> verifying number of documents in index [{}]", restoredAgainIndex); - assertHitCount(client().prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); + assertHitCount(prepareSearch(restoredAgainIndex).setSize(0).setTrackTotalHits(true), numberOfDocs); logger.info("--> deleting indices, maintenance service should clean up [{}] docs in system index", numberOfCachedBlobs); assertAcked(indicesAdmin().prepareDelete("restored-*")); @@ -360,7 +360,7 @@ private void refreshSystemIndex() { assertThat(refreshResponse.getSuccessfulShards(), greaterThan(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { - throw new AssertionError("unexpected", indexNotFoundException); + fail(indexNotFoundException); } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 5048eace460c6..68b702469d138 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -335,7 +335,7 @@ private void refreshSystemIndex(boolean failIfNotExist) { assertThat(refreshResponse.getSuccessfulShards(), failIfNotExist ? 
greaterThan(0) : greaterThanOrEqualTo(0)); assertThat(refreshResponse.getFailedShards(), equalTo(0)); } catch (IndexNotFoundException indexNotFoundException) { - throw new AssertionError("unexpected", indexNotFoundException); + fail(indexNotFoundException); } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java index c0d413d09fc5b..50149cf8ca376 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/SearchableSnapshotsPrewarmingIntegTests.java @@ -251,11 +251,13 @@ public void testConcurrentPrewarming() throws Exception { final CountDownLatch startPrewarmingLatch = new CountDownLatch(1); final var threadPool = getInstanceFromNode(ThreadPool.class); final int maxUploadTasks = threadPool.info(CACHE_PREWARMING_THREAD_POOL_NAME).getMax(); + final CountDownLatch maxUploadTasksCreated = new CountDownLatch(maxUploadTasks); for (int i = 0; i < maxUploadTasks; i++) { threadPool.executor(CACHE_PREWARMING_THREAD_POOL_NAME).execute(new AbstractRunnable() { @Override protected void doRun() throws Exception { + maxUploadTasksCreated.countDown(); startPrewarmingLatch.await(); } @@ -265,7 +267,7 @@ public void onFailure(Exception e) { } }); } - + safeAwait(maxUploadTasksCreated); var prewarmingExecutor = threadPool.executor(CACHE_PREWARMING_THREAD_POOL_NAME); assertThat(prewarmingExecutor, instanceOf(ThreadPoolExecutor.class)); assertThat(((ThreadPoolExecutor) prewarmingExecutor).getActiveCount(), equalTo(maxUploadTasks)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java index 28a537c5da9ec..0d1bc7eec94bc 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/NodesCachesStatsIntegTests.java @@ -107,14 +107,11 @@ public void testNodesCachesStats() throws Exception { } for (int i = 0; i < 20; i++) { - client().prepareSearch(mountedIndex) - .setQuery( - randomBoolean() - ? QueryBuilders.rangeQuery("id").gte(randomIntBetween(0, 1000)) - : QueryBuilders.termQuery("test", "value" + randomIntBetween(0, 1000)) - ) - .setSize(randomIntBetween(0, 1000)) - .get(); + prepareSearch(mountedIndex).setQuery( + randomBoolean() + ? 
QueryBuilders.rangeQuery("id").gte(randomIntBetween(0, 1000)) + : QueryBuilders.termQuery("test", "value" + randomIntBetween(0, 1000)) + ).setSize(randomIntBetween(0, 1000)).get(); } assertExecutorIsIdle(SearchableSnapshots.CACHE_FETCH_ASYNC_THREAD_POOL_NAME); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java index eafe0f5d819e3..2824aa22496a1 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/shared/PartiallyCachedShardAllocationIntegTests.java @@ -271,18 +271,15 @@ public void testPartialSearchableSnapshotDelaysAllocationUntilNodeCacheStatesKno ); // Unblock the other new node, but maybe inject a few errors - final MockTransportService transportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - newNodes.get(0) - ); final Semaphore failurePermits = new Semaphore(between(0, 2)); - transportService.addRequestHandlingBehavior(FrozenCacheInfoNodeAction.NAME, (handler, request, channel, task) -> { - if (failurePermits.tryAcquire()) { - channel.sendResponse(new ElasticsearchException("simulated")); - } else { - handler.messageReceived(request, channel, task); - } - }); + MockTransportService.getInstance(newNodes.get(0)) + .addRequestHandlingBehavior(FrozenCacheInfoNodeAction.NAME, (handler, request, channel, task) -> { + if (failurePermits.tryAcquire()) { + channel.sendResponse(new ElasticsearchException("simulated")); + } else { + handler.messageReceived(request, channel, task); + } + }); cacheInfoBlockGetter.apply(newNodes.get(0)).onResponse(null); final RestoreSnapshotResponse restoreSnapshotResponse = responseFuture.actionGet(10, TimeUnit.SECONDS); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java index acd9ad9e85f50..98f6da9ba6a58 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshots.java @@ -23,13 +23,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RerouteService; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.cluster.routing.allocation.ExistingShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -41,7 +39,6 @@ import 
org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; @@ -51,7 +48,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogStats; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.XPackLicenseState; @@ -67,15 +63,11 @@ import org.elasticsearch.repositories.blobstore.BlobStoreRepository; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.snapshots.sourceonly.SourceOnlySnapshotRepository; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; @@ -167,7 +159,7 @@ public class SearchableSnapshots extends Plugin implements IndexStorePlugin, Eng Setting.Property.NotCopyableOnResize ); public static final Setting<String> SNAPSHOT_INDEX_NAME_SETTING = Setting.simpleString( - "index.store.snapshot.index_name", + SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_INDEX_NAME_SETTING_KEY, Setting.Property.IndexScope, Setting.Property.PrivateIndex, Setting.Property.NotCopyableOnResize @@ -319,24 +311,14 @@ public List<Setting<?>> getSettings() { } @Override - public Collection<Object> createComponents( - final Client client, - final ClusterService clusterService, - final ThreadPool threadPool, - final ResourceWatcherService resourceWatcherService, - final ScriptService scriptService, - final NamedXContentRegistry xContentRegistry, - final Environment environment, - final NodeEnvironment nodeEnvironment, - final NamedWriteableRegistry registry, - final IndexNameExpressionResolver resolver, - final Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + NodeEnvironment nodeEnvironment = services.nodeEnvironment(); + final List<Object> components = new ArrayList<>(); - this.repositoriesServiceSupplier = repositoriesServiceSupplier; + this.repositoriesServiceSupplier = services.repositoriesServiceSupplier(); this.threadPool.set(threadPool); this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); if (DiscoveryNode.canContainData(settings)) { @@ -364,7 +346,7 @@ public Collection<Object> createComponents( PersistentCache.cleanUp(settings, nodeEnvironment); } - if (DiscoveryNode.isMasterNode(environment.settings())) { + if (DiscoveryNode.isMasterNode(services.environment().settings())) { // Tracking usage
of searchable snapshots is too costly to do on each individually mounted snapshot. // Instead, we periodically look through the indices and identify if any are searchable snapshots, // then mark the feature as used. We do this on each master node so that if one master fails, the diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java index 59d6e7a5feac3..b72fc99778a31 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/action/TransportMountSearchableSnapshotAction.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ListenableFuture; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexSettings; @@ -180,7 +181,10 @@ protected void masterOperation( SearchableSnapshots.getSearchableRepository(repository); // just check it's valid final ListenableFuture<RepositoryData> repositoryDataListener = new ListenableFuture<>(); - repository.getRepositoryData(repositoryDataListener); + repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO fork to SNAPSHOT_META and drop the forking below, see #101445 + repositoryDataListener + ); repositoryDataListener.addListener(listener.delegateFailureAndWrap((delegate, repoData) -> { final Map<String, IndexId> indexIds = repoData.getIndices(); if (indexIds.containsKey(indexName) == false) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index 3d9aefba65913..8a7bcc565acfa 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -19,7 +19,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.ShardLimitValidator; import org.elasticsearch.threadpool.ThreadPool; @@ -99,8 +99,8 @@ static boolean needsUpgrade(ClusterState state) { return state.metadata() .stream() .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersion.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersion.V_8_0_0) + imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) + && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) ) .filter(IndexMetadata::isPartialSearchableSnapshot) .map(IndexMetadata::getSettings) @@ -115,8 +115,8 @@ static ClusterState upgradeIndices(ClusterState currentState) { currentState.metadata()
.stream() .filter( - imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersion.V_7_12_0) - && imd.getCompatibilityVersion().before(IndexVersion.V_8_0_0) + imd -> imd.getCompatibilityVersion().onOrAfter(IndexVersions.V_7_12_0) + && imd.getCompatibilityVersion().before(IndexVersions.V_8_0_0) ) .filter(imd -> imd.isPartialSearchableSnapshot() && notFrozenShardLimitGroup(imd.getSettings())) .map(SearchableSnapshotIndexMetadataUpgrader::setShardLimitGroupFrozen) diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 7f1dc6f51d73e..594d356becf87 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.ShardLimitValidator; import org.elasticsearch.snapshots.SearchableSnapshotsSettings; import org.elasticsearch.test.ESTestCase; @@ -118,8 +119,8 @@ private Settings partialNeedsUpgrade() { return searchableSnapshotSettings( IndexVersionUtils.randomVersionBetween( random(), - IndexVersion.V_7_12_0, - IndexVersionUtils.getPreviousVersion(IndexVersion.V_8_0_0) + IndexVersions.V_7_12_0, + IndexVersionUtils.getPreviousVersion(IndexVersions.V_8_0_0) ), true ); @@ -131,7 +132,7 @@ private Settings partialNeedsUpgrade() { private Settings partial_7_13plus() { return shardLimitGroupFrozen( searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_13_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_13_0, IndexVersion.current()), true ) ); @@ -142,7 +143,7 @@ private Settings partial_7_13plus() { */ private Settings partial_8plusNoShardLimit() { return searchableSnapshotSettings( - IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_8_0_0, IndexVersion.current()), + IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_8_0_0, IndexVersion.current()), true ); } diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index f58ddd3a067cd..f66631a57b4bb 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -110,8 +110,7 @@ public class ServiceAccountIT extends ESRestTestCase { ], "privileges": [ "read", - "write", - "auto_configure" + "write" ], "allow_restricted_indices": false }, diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java index d711fe4639fe6..58d33fc221b21 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java @@ -135,7 +135,7 @@ public void onFailure(Exception e) { client.execute( ModifyDataStreamsAction.INSTANCE, new ModifyDataStreamsAction.Request(List.of(DataStreamAction.removeBackingIndex(dataStreamName, ghostReference.getName()))) - ).actionGet() + ) ); ClusterState after = internalCluster().getCurrentMasterNodeInstance(ClusterService.class).state(); assertThat(after.getMetadata().dataStreams().get(dataStreamName).getIndices(), hasSize(1)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index 301b264308af0..7c33c69460768 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -386,18 +386,8 @@ public void testRequestCacheWithTemplateRoleQuery() { private void prepareIndices() { final Client client = client(); - assertAcked( - client.admin() - .cluster() - .preparePutStoredScript() - .setId("my-script") - .setContent( - new BytesArray(""" - {"script":{"source":"{\\"match\\":{\\"username\\":\\"{{_user.username}}\\"}}","lang":"mustache"}}"""), - XContentType.JSON - ) - .get() - ); + assertAcked(client.admin().cluster().preparePutStoredScript().setId("my-script").setContent(new BytesArray(""" + {"script":{"source":"{\\"match\\":{\\"username\\":\\"{{_user.username}}\\"}}","lang":"mustache"}}"""), XContentType.JSON)); assertAcked(indicesAdmin().prepareCreate(DLS_INDEX).addAlias(new Alias("dls-alias")).get()); client.prepareIndex(DLS_INDEX).setId("101").setSource("number", 101, "letter", "A").get(); @@ -408,7 +398,7 @@ private void prepareIndices() { client.prepareIndex(FLS_INDEX).setId("202").setSource("public", "Y", "private", "y").get(); assertAcked( - indicesAdmin().prepareCreate(INDEX).addAlias(new Alias(ALIAS1)).addAlias(new Alias(ALIAS2)).addAlias(new Alias(ALL_ALIAS)).get() + indicesAdmin().prepareCreate(INDEX).addAlias(new Alias(ALIAS1)).addAlias(new Alias(ALIAS2)).addAlias(new Alias(ALL_ALIAS)) ); client.prepareIndex(INDEX).setId("1").setSource("number", 1, "letter", "a", "private", "sesame_1", "public", "door_1").get(); client.prepareIndex(INDEX).setId("2").setSource("number", 2, "letter", "b", "private", "sesame_2", "public", "door_2").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java index 1aa5c9609f6a1..b5e5183df086d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java @@ -108,7 +108,7 @@ public void testDuelWithAliasFilters() throws Exception { SearchResponse searchResponse1 = client().filterWithHeader( 
Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user" + roleI, USERS_PASSWD)) ).prepareSearch("test").get(); - SearchResponse searchResponse2 = client().prepareSearch("alias" + roleI).get(); + SearchResponse searchResponse2 = prepareSearch("alias" + roleI).get(); assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 80f2ed881bbad..a76b043737375 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -95,7 +95,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; @@ -208,32 +208,29 @@ public void testSimpleQuery() throws Exception { client().prepareIndex("test").setId("2").setSource("field2", "value2").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("test").setId("3").setSource("field3", "value3").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ) - .prepareSearch("test") - .setQuery(randomBoolean() ? QueryBuilders.termQuery("field1", "value1") : QueryBuilders.matchAllQuery()) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(randomBoolean() ? QueryBuilders.termQuery("field2", "value2") : QueryBuilders.matchAllQuery()) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "2"); - + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(randomBoolean() ? QueryBuilders.termQuery("field1", "value1") : QueryBuilders.matchAllQuery()), + "1" + ); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(randomBoolean() ? 
QueryBuilders.termQuery("field2", "value2") : QueryBuilders.matchAllQuery()), + "2" + ); QueryBuilder combined = QueryBuilders.boolQuery() .should(QueryBuilders.termQuery("field2", "value2")) .should(QueryBuilders.termQuery("field1", "value1")) .minimumShouldMatch(1); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("test") - .setQuery(randomBoolean() ? combined : QueryBuilders.matchAllQuery()) - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "1", "2"); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("test") + .setQuery(randomBoolean() ? combined : QueryBuilders.matchAllQuery()), + "1", + "2" + ); } public void testGetApi() throws Exception { @@ -458,8 +455,8 @@ public void testMSearch() throws Exception { Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -475,8 +472,8 @@ public void testMSearch() throws Exception { response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -493,14 +490,10 @@ public void testMSearch() throws Exception { response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareMultiSearch() .add( - client().prepareSearch("test1") - .addSort(SortBuilders.fieldSort("id").sortMode(SortMode.MIN)) - .setQuery(QueryBuilders.matchAllQuery()) + prepareSearch("test1").addSort(SortBuilders.fieldSort("id").sortMode(SortMode.MIN)).setQuery(QueryBuilders.matchAllQuery()) ) .add( - client().prepareSearch("test2") - .addSort(SortBuilders.fieldSort("id").sortMode(SortMode.MIN)) - .setQuery(QueryBuilders.matchAllQuery()) + prepareSearch("test2").addSort(SortBuilders.fieldSort("id").sortMode(SortMode.MIN)).setQuery(QueryBuilders.matchAllQuery()) ) .get(); assertFalse(response.getResponses()[0].isFailure()); @@ -543,14 +536,14 @@ public void testPercolateQueryWithIndexedDocWithDLS() { SearchResponse result = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ).prepareSearch("query_index").setQuery(new PercolateQueryBuilder("query", "doc_index", "1", null, null, null)).get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user2 can access the query_index itself (without performing percolate search) result = 
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareSearch("query_index") .setQuery(QueryBuilders.matchAllQuery()) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user2 cannot access doc#1 of the doc_index so the percolate search fails because doc#1 cannot be found ResourceNotFoundException e = expectThrows( @@ -598,14 +591,14 @@ public void testGeoQueryWithIndexedShapeWithDLS() { requestBuilder.setQuery(shapeQuery); } result = requestBuilder.get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user2 does not have access to doc#1 of the shape_index result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareSearch("search_index") .setQuery(QueryBuilders.matchAllQuery()) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); IllegalArgumentException e; if (randomBoolean()) { @@ -676,11 +669,14 @@ public void testTermsLookupOnIndexWithDLS() { // Lookup doc#1 is: visible to user1 and user3, but hidden from user2 TermsQueryBuilder lookup = QueryBuilders.termsLookupQuery("search_field", new TermsLookup("lookup_index", "1", "lookup_field")); - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) - ).prepareSearch("search_index").setQuery(lookup).get(); - assertHitCount(response, 3); - assertSearchHits(response, "1", "2", "3"); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(lookup), + "1", + "2", + "3" + ); assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) @@ -688,13 +684,16 @@ public void testTermsLookupOnIndexWithDLS() { .setQuery(lookup), 0 ); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("search_index") - .setQuery(lookup) - .get(); - assertHitCount(response, 5); - assertSearchHits(response, "1", "2", "3", "4", "5"); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(lookup), + "1", + "2", + "3", + "4", + "5" + ); // Lookup doc#2 is: hidden from user1, visible to user2 and user3 lookup = QueryBuilders.termsLookupQuery("search_field", new TermsLookup("lookup_index", "2", "lookup_field")); assertHitCount( @@ -704,20 +703,21 @@ public void testTermsLookupOnIndexWithDLS() { .get(), 0 ); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) - .prepareSearch("search_index") - .setQuery(lookup) - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "2", "5"); - - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) - .prepareSearch("search_index") - .setQuery(lookup) - .get(); - assertHitCount(response, 3); - assertSearchHits(response, "1", "2", "5"); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) 
+ .prepareSearch("search_index") + .setQuery(lookup), + "2", + "5" + ); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(lookup), + "1", + "2", + "5" + ); } public void testTVApi() throws Exception { @@ -921,9 +921,9 @@ public void testGlobalAggregation() throws Exception { client().prepareIndex("test").setId("2").setSource("field2", "value2").setRefreshPolicy(IMMEDIATE).get(); client().prepareIndex("test").setId("3").setSource("field3", "value3").setRefreshPolicy(IMMEDIATE).get(); - SearchResponse response = client().prepareSearch("test") - .addAggregation(AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2"))) - .get(); + SearchResponse response = prepareSearch("test").addAggregation( + AggregationBuilders.global("global").subAggregation(AggregationBuilders.terms("field2").field("field2")) + ).get(); assertHitCount(response, 3); assertSearchHits(response, "1", "2", "3"); @@ -1019,14 +1019,11 @@ public void testParentChild() throws Exception { } private void verifyParentChild() { - SearchResponse searchResponse = client().prepareSearch("test") - .setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)) - .get(); + SearchResponse searchResponse = prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None)).get(); assertHitCount(searchResponse, 1L); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); - searchResponse = client().prepareSearch("test") - .setQuery(hasParentQuery("parent", matchAllQuery(), false)) + searchResponse = prepareSearch("test").setQuery(hasParentQuery("parent", matchAllQuery(), false)) .addSort("id", SortOrder.ASC) .get(); assertHitCount(searchResponse, 3L); @@ -1352,9 +1349,9 @@ public void testSuggesters() throws Exception { ); // Term suggester: - SearchResponse response = client().prepareSearch("test") - .suggest(new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new TermSuggestionBuilder("suggest_field1"))) - .get(); + SearchResponse response = prepareSearch("test").suggest( + new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new TermSuggestionBuilder("suggest_field1")) + ).get(); assertNoFailures(response); TermSuggestion termSuggestion = response.getSuggest().getSuggestion("_name1"); @@ -1377,9 +1374,9 @@ public void testSuggesters() throws Exception { assertThat(e.getMessage(), equalTo("Suggest isn't supported if document level security is enabled")); // Phrase suggester: - response = client().prepareSearch("test") - .suggest(new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new PhraseSuggestionBuilder("suggest_field1"))) - .get(); + response = prepareSearch("test").suggest( + new SuggestBuilder().setGlobalText("valeu").addSuggestion("_name1", new PhraseSuggestionBuilder("suggest_field1")) + ).get(); assertNoFailures(response); PhraseSuggestion phraseSuggestion = response.getSuggest().getSuggestion("_name1"); @@ -1398,9 +1395,9 @@ public void testSuggesters() throws Exception { assertThat(e.getMessage(), equalTo("Suggest isn't supported if document level security is enabled")); // Completion suggester: - response = client().prepareSearch("test") - .suggest(new SuggestBuilder().setGlobalText("valu").addSuggestion("_name1", new CompletionSuggestionBuilder("suggest_field2"))) - .get(); + response = prepareSearch("test").suggest( + new 
SuggestBuilder().setGlobalText("valu").addSuggestion("_name1", new CompletionSuggestionBuilder("suggest_field2")) + ).get(); assertNoFailures(response); CompletionSuggestion completionSuggestion = response.getSuggest().getSuggestion("_name1"); @@ -1445,10 +1442,7 @@ public void testProfile() throws Exception { .setMapping("field1", "type=text", "other_field", "type=text", "yet_another", "type=text") ); - SearchResponse response = client().prepareSearch("test") - .setProfile(true) - .setQuery(new FuzzyQueryBuilder("other_field", "valeu")) - .get(); + SearchResponse response = prepareSearch("test").setProfile(true).setQuery(new FuzzyQueryBuilder("other_field", "valeu")).get(); assertNoFailures(response); assertThat(response.getProfileResults().size(), equalTo(1)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 8c69a96648a3a..f9bd893ea3653 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -206,8 +206,7 @@ public void testDuel() throws Exception { .should(QueryBuilders.termQuery("field3", "value")) ) .get(); - SearchResponse expected = client().prepareSearch("test") - .addSort("id", SortOrder.ASC) + SearchResponse expected = prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))) .get(); assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); @@ -226,8 +225,7 @@ public void testDuel() throws Exception { .should(QueryBuilders.termQuery("field3", "value")) ) .get(); - expected = client().prepareSearch("test") - .addSort("id", SortOrder.ASC) + expected = prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))) .get(); assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); @@ -246,8 +244,7 @@ public void testDuel() throws Exception { .should(QueryBuilders.termQuery("field3", "value")) ) .get(); - expected = client().prepareSearch("test") - .addSort("id", SortOrder.ASC) + expected = prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))) .get(); assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index 1aee3e445ab94..d5d48440c34ea 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -81,8 +81,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.equalTo; @@ -485,33 +484,33 @@ public void testPercolateQueryWithIndexedDocWithFLS() { SearchResponse result = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD)) ).prepareSearch("query_index").setQuery(percolateQuery).get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareSearch("query_index") .setQuery(QueryBuilders.matchAllQuery()) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user 3 can see the fields of the percolated document, but not the "query" field of the indexed query result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareSearch("query_index") .setQuery(percolateQuery) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 0); result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) .prepareSearch("query_index") .setQuery(QueryBuilders.matchAllQuery()) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user 9 can see the fields of the index query, but not the field of the indexed document to be percolated result = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user9", USERS_PASSWD))) .prepareSearch("query_index") .setQuery(percolateQuery) .get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 0); } @@ -573,7 +572,7 @@ public void testGeoQueryWithIndexedShapeWithFLS() { requestBuilder.setQuery(shapeQuery1); } result = requestBuilder.get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 1); // user sees the queried point but not the querying shape final ShapeQueryBuilder shapeQuery2 = new ShapeQueryBuilder("field", "2").relation(ShapeRelation.WITHIN) @@ -611,7 +610,7 @@ public void testGeoQueryWithIndexedShapeWithFLS() { requestBuilder.setQuery(shapeQuery3); } result = requestBuilder.get(); - assertSearchResponse(result); + assertNoFailures(result); assertHitCount(result, 0); } @@ -633,20 +632,19 @@ public void testTermsLookupOnIndexWithFLS() { client().prepareIndex("lookup_index").setId("2").setSource("other", "value2", "field", "value2").setRefreshPolicy(IMMEDIATE).get(); // user sees the terms doc field - SearchResponse response = client().filterWithHeader( - Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD)) - ) - .prepareSearch("search_index") - .setQuery(QueryBuilders.termsLookupQuery("field", new TermsLookup("lookup_index", "1", "field"))) - .get(); - assertHitCount(response, 2); - assertSearchHits(response, "1", "2"); - response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, 
basicAuthHeaderValue("user6", USERS_PASSWD))) - .prepareSearch("search_index") - .setQuery(QueryBuilders.termsLookupQuery("field", new TermsLookup("lookup_index", "2", "field"))) - .get(); - assertHitCount(response, 1); - assertSearchHits(response, "1"); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(QueryBuilders.termsLookupQuery("field", new TermsLookup("lookup_index", "1", "field"))), + "1", + "2" + ); + assertSearchHitsWithoutFailures( + client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) + .prepareSearch("search_index") + .setQuery(QueryBuilders.termsLookupQuery("field", new TermsLookup("lookup_index", "2", "field"))), + "1" + ); // user does not see the terms doc field assertHitCount( client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) @@ -936,8 +934,8 @@ public void testMSearchApi() throws Exception { Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -950,8 +948,8 @@ public void testMSearchApi() throws Exception { // user2 is granted access to field2 only response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -964,8 +962,8 @@ public void testMSearchApi() throws Exception { // user3 is granted access to field1 and field2 response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user3", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -980,8 +978,8 @@ public void testMSearchApi() throws Exception { // user4 is granted access to no fields, so the search response does say the doc exist, but no fields are returned response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user4", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - 
.add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -992,8 +990,8 @@ public void testMSearchApi() throws Exception { // user5 has no field level security configured, so all fields are returned response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user5", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -1010,8 +1008,8 @@ public void testMSearchApi() throws Exception { // user6 has access to field* response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user6", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -1028,8 +1026,8 @@ public void testMSearchApi() throws Exception { // user7 has roles with field level security and without field level security response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user7", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); @@ -1046,8 +1044,8 @@ public void testMSearchApi() throws Exception { // user8 has roles with field level security with access to field1 and field2 response = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user8", USERS_PASSWD))) .prepareMultiSearch() - .add(client().prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) - .add(client().prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test1").setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())) .get(); assertFalse(response.getResponses()[0].isFailure()); assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index fe0dd919d7453..e2cce37789ffb 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -102,7 +102,7 @@ public void testSearchAndMSearch() throws Exception { final String field = "foo"; indexRandom(true, client().prepareIndex().setIndex(index).setSource(field, "bar")); - SearchResponse response = client().prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).get(); + SearchResponse response = prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()).get(); final long hits = response.getHits().getTotalHits().value; assertThat(hits, greaterThan(0L)); response = client().filterWithHeader( @@ -111,13 +111,13 @@ public void testSearchAndMSearch() throws Exception { assertEquals(response.getHits().getTotalHits().value, hits); MultiSearchResponse multiSearchResponse = client().prepareMultiSearch() - .add(client().prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) + .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); final long multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; assertThat(hits, greaterThan(0L)); multiSearchResponse = client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) - ).prepareMultiSearch().add(client().prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); + ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java index 49bb1034ee0bc..d2e3907204654 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityCachePermissionTests.java @@ -47,10 +47,9 @@ public void loadData() { } public void testThatTermsFilterQueryDoesntLeakData() { - SearchResponse response = client().prepareSearch("data") - .setQuery(QueryBuilders.constantScoreQuery(QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens")))) - .execute() - .actionGet(); + SearchResponse response = prepareSearch("data").setQuery( + QueryBuilders.constantScoreQuery(QueryBuilders.termsLookupQuery("token", new TermsLookup("tokens", "1", "tokens"))) + ).execute().actionGet(); assertThat(response.isTimedOut(), is(false)); assertThat(response.getHits().getHits().length, is(1)); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityClearScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityClearScrollTests.java index 83cda0e51851d..ca826be904771 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityClearScrollTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/SecurityClearScrollTests.java @@ -78,7 +78,7 @@ public void indexRandomDocuments() { MultiSearchRequestBuilder multiSearchRequestBuilder = client().prepareMultiSearch(); int count = randomIntBetween(5, 15); for (int i = 0; i < count; i++) { - multiSearchRequestBuilder.add(client().prepareSearch("index").setScroll("10m").setSize(1)); + multiSearchRequestBuilder.add(prepareSearch("index").setScroll("10m").setSize(1)); } MultiSearchResponse multiSearchResponse = multiSearchRequestBuilder.get(); scrollIds = getScrollIds(multiSearchResponse); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/ShrinkIndexWithSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/ShrinkIndexWithSecurityTests.java index 1a663678a738b..c5efabfca13db 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/ShrinkIndexWithSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/ShrinkIndexWithSecurityTests.java @@ -55,9 +55,6 @@ public void testShrinkIndex() throws Exception { // verify all docs ensureGreen(); - assertHitCount( - client().prepareSearch("shrunk_bigindex").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), - randomNumberOfDocs - ); + assertHitCount(prepareSearch("shrunk_bigindex").setSize(100).setQuery(new TermsQueryBuilder("foo", "bar")), randomNumberOfDocs); } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java index 4f9cb10a2e3bc..9115c35551585 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java @@ -149,7 +149,6 @@ void clearRealmCache() { new ClearRealmCacheRequestBuilder(client()).get(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97772") public void testDelegateThenAuthenticate() throws Exception { final X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt"); final X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt"); @@ -192,7 +191,6 @@ public void testDelegateThenAuthenticate() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97772") public void testTokenInvalidate() throws Exception { final X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt"); final X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt"); @@ -296,7 +294,6 @@ public void testDelegateUnauthorized() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/97772") public void testDelegatePkiWithRoleMapping() throws Exception { X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt"); X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt"); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java index 5418bbf7f8dd9..f7bc8a1770981 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/IndexAliasesTests.java @@ -351,7 +351,6 @@ public void testDeleteAliasesCreateAndAliasesPermission() { .addAlias(new Alias("test_alias_2")) .addAlias(new Alias("test_alias_3")) .addAlias(new Alias("test_alias_4")) - .get() ); // ok: user has manage_aliases on test_* assertAcked(client.admin().indices().prepareAliases().removeAlias("test_1", "test_alias_1").get()); @@ -824,11 +823,7 @@ public void testAliasesForHiddenIndices() { final Client aliasesClient = client(aliasHeaders); assertAcked( - createClient.admin() - .indices() - .prepareCreate(hiddenIndex) - .setSettings(Settings.builder().put("index.hidden", true).build()) - .get() + createClient.admin().indices().prepareCreate(hiddenIndex).setSettings(Settings.builder().put("index.hidden", true).build()) ); assertAcked( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index d90434ea7d9a8..6220fc2ae2c2c 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -13,13 +13,13 @@ import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.action.termvectors.TermVectorsAction; import org.elasticsearch.core.Strings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.SecurityIntegTestCase; @@ -54,17 +54,13 @@ protected String configRoles() { public void testSearchForAll() { // index1 is not authorized and referred to through wildcard createIndicesWithRandomAliases("test1", "test2", "test3", "index1"); - - SearchResponse searchResponse = trySearch(); - assertReturnedIndices(searchResponse, "test1", "test2", "test3"); + assertReturnedIndices(trySearch(), "test1", "test2", "test3"); } public void testSearchForWildcard() { // index1 is not authorized and referred to through wildcard createIndicesWithRandomAliases("test1", "test2", "test3", "index1"); - - SearchResponse searchResponse = trySearch("*"); - assertReturnedIndices(searchResponse, "test1", "test2", "test3"); + assertReturnedIndices(trySearch("*"), "test1", "test2", "test3"); } public void testSearchNonAuthorizedWildcard() { @@ -78,7 +74,7 @@ public void testSearchNonAuthorizedWildcardDisallowNoIndices() { createIndicesWithRandomAliases("test1", "test2", "index1", "index2"); IndexNotFoundException e = expectThrows( IndexNotFoundException.class, - () -> 
trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "index*") + trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "index*") ); assertEquals("no such index [index*]", e.getMessage()); } @@ -90,20 +86,19 @@ public void testEmptyClusterSearchForAll() { public void testEmptyClusterSearchForAllDisallowNoIndices() { IndexNotFoundException e = expectThrows( IndexNotFoundException.class, - () -> trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())) + trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())) ); assertEquals("no such index [[]]", e.getMessage()); } public void testEmptyClusterSearchForWildcard() { - SearchResponse searchResponse = trySearch("*"); - assertNoSearchHits(searchResponse); + assertNoSearchHits(trySearch("*")); } public void testEmptyClusterSearchForWildcardDisallowNoIndices() { IndexNotFoundException e = expectThrows( IndexNotFoundException.class, - () -> trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "*") + trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "*") ); assertEquals("no such index [*]", e.getMessage()); } @@ -117,7 +112,7 @@ public void testEmptyAuthorizedIndicesSearchForAllDisallowNoIndices() { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows( IndexNotFoundException.class, - () -> trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())) + trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean())) ); assertEquals("no such index [[]]", e.getMessage()); } @@ -131,19 +126,19 @@ public void testEmptyAuthorizedIndicesSearchForWildcardDisallowNoIndices() { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows( IndexNotFoundException.class, - () -> trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "*") + trySearch(IndicesOptions.fromOptions(randomBoolean(), false, true, randomBoolean()), "*") ); assertEquals("no such index [*]", e.getMessage()); } public void testExplicitNonAuthorizedIndex() { createIndicesWithRandomAliases("test1", "test2", "index1"); - assertThrowsAuthorizationExceptionDefaultUsers(() -> trySearch("test*", "index1"), SearchAction.NAME); + assertThrowsAuthorizationExceptionDefaultUsers(() -> trySearch("test*", "index1").get(), SearchAction.NAME); } public void testIndexNotFound() { createIndicesWithRandomAliases("test1", "test2", "index1"); - assertThrowsAuthorizationExceptionDefaultUsers(() -> trySearch("missing"), SearchAction.NAME); + assertThrowsAuthorizationExceptionDefaultUsers(() -> trySearch("missing").get(), SearchAction.NAME); } public void testIndexNotFoundIgnoreUnavailable() { @@ -176,46 +171,36 @@ public void testIndexNotFoundIgnoreUnavailable() { public void testExplicitExclusion() { // index1 is not authorized and referred to through wildcard, test2 is excluded createIndicesWithRandomAliases("test1", "test2", "test3", "index1"); - - SearchResponse searchResponse = trySearch("*", "-test2"); - assertReturnedIndices(searchResponse, "test1", "test3"); + assertReturnedIndices(trySearch("*", "-test2"), "test1", "test3"); } public void testWildcardExclusion() { // index1 is not authorized and referred to through wildcard, test2 is excluded createIndicesWithRandomAliases("test1", "test2", "test21", "test3", "index1"); - - SearchResponse searchResponse = trySearch("*", "-test2*"); 
- assertReturnedIndices(searchResponse, "test1", "test3"); + assertReturnedIndices(trySearch("*", "-test2*"), "test1", "test3"); } public void testInclusionAndWildcardsExclusion() { // index1 is not authorized and referred to through wildcard, test111 and test112 are excluded createIndicesWithRandomAliases("test1", "test10", "test111", "test112", "test2", "index1"); - - SearchResponse searchResponse = trySearch("test1*", "index*", "-test11*"); - assertReturnedIndices(searchResponse, "test1", "test10"); + assertReturnedIndices(trySearch("test1*", "index*", "-test11*"), "test1", "test10"); } public void testExplicitAndWildcardsInclusionAndWildcardExclusion() { // index1 is not authorized and referred to through wildcard, test111 and test112 are excluded createIndicesWithRandomAliases("test1", "test10", "test111", "test112", "test2", "index1"); - - SearchResponse searchResponse = trySearch("test2", "test11*", "index*", "-test2*"); - assertReturnedIndices(searchResponse, "test111", "test112"); + assertReturnedIndices(trySearch("test2", "test11*", "index*", "-test2*"), "test111", "test112"); } public void testExplicitAndWildcardInclusionAndExplicitExclusions() { // index1 is not authorized and referred to through wildcard, test111 and test112 are excluded createIndicesWithRandomAliases("test1", "test10", "test111", "test112", "test2", "index1"); - - SearchResponse searchResponse = trySearch("test10", "test11*", "index*", "-test111", "-test112"); - assertReturnedIndices(searchResponse, "test10"); + assertReturnedIndices(trySearch("test10", "test11*", "index*", "-test111", "-test112"), "test10"); } public void testMissingDateMath() { - expectThrows(ElasticsearchSecurityException.class, () -> trySearch("")); - expectThrows(IndexNotFoundException.class, () -> trySearch("")); + expectThrows(ElasticsearchSecurityException.class, trySearch("")); + expectThrows(IndexNotFoundException.class, trySearch("")); } public void testMultiSearchUnauthorizedIndex() { @@ -424,12 +409,25 @@ public void testMultiTermVectors() { assertThat(response.getResponses()[4].getFailure().getCause(), instanceOf(IndexNotFoundException.class)); } - private SearchResponse trySearch(String... indices) { - return client().prepareSearch(indices).get(TimeValue.timeValueSeconds(20)); + private SearchRequestBuilder trySearch(String... indices) { + return prepareSearch(indices); + } + + private SearchRequestBuilder trySearch(IndicesOptions options, String... indices) { + return prepareSearch(indices).setIndicesOptions(options); } - private SearchResponse trySearch(IndicesOptions options, String... indices) { - return client().prepareSearch(indices).setIndicesOptions(options).get(TimeValue.timeValueSeconds(20)); + private static <T extends Throwable> T expectThrows(Class<T> expectedType, SearchRequestBuilder searchRequestBuilder) { + return expectThrows(expectedType, searchRequestBuilder::get); + } + + private static void assertReturnedIndices(SearchRequestBuilder searchRequestBuilder, String... indices) { + var searchResponse = searchRequestBuilder.get(); + try { + assertReturnedIndices(searchResponse, indices); + } finally { + searchResponse.decRef(); + } } private static void assertReturnedIndices(SearchResponse searchResponse, String...
indices) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index 0ead996103256..57137075c5942 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -46,11 +46,7 @@ public void testScrollIsPerUser() throws Exception { } indexRandom(true, docs); - SearchResponse response = client().prepareSearch("foo") - .setScroll(TimeValue.timeValueSeconds(5L)) - .setQuery(matchAllQuery()) - .setSize(1) - .get(); + SearchResponse response = prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1).get(); assertEquals(numDocs, response.getHits().getTotalHits().value); assertEquals(1, response.getHits().getHits().length); @@ -81,8 +77,7 @@ public void testSearchAndClearScroll() throws Exception { docs[i] = client().prepareIndex("idx").setSource("field", "value"); } indexRandom(true, docs); - SearchResponse response = client().prepareSearch() - .setQuery(matchAllQuery()) + SearchResponse response = prepareSearch().setQuery(matchAllQuery()) .setScroll(TimeValue.timeValueSeconds(5L)) .setSize(randomIntBetween(1, 10)) .get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java index 544d86525a971..0892c6f88873f 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/SecurityDomainIntegTests.java @@ -379,7 +379,7 @@ private void assertAccessToken(CreateTokenResponse createTokenResponse) throws I .prepareHealth() .execute() .actionGet(); - final SearchResponse searchResponse = client().prepareSearch(SecuritySystemIndices.SECURITY_TOKENS_ALIAS).execute().actionGet(); + final SearchResponse searchResponse = prepareSearch(SecuritySystemIndices.SECURITY_TOKENS_ALIAS).execute().actionGet(); final String encodedAuthentication = createTokenResponse.getAuthentication().encode(); for (SearchHit searchHit : searchResponse.getHits().getHits()) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java index 263480b9cb8e7..4705361e51dbd 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerIntegTests.java @@ -123,7 +123,6 @@ public void testSecurityIndexSettingsCannotBeChanged() throws Exception { .put("index.number_of_replicas", "8") .build() ) - .get() ); // create an new-style template ComposableIndexTemplate cit = new ComposableIndexTemplate( @@ -146,7 +145,7 @@ public void testSecurityIndexSettingsCannotBeChanged() throws Exception { client().execute( PutComposableIndexTemplateAction.INSTANCE, 
new PutComposableIndexTemplateAction.Request("composable-template-covering-the-main-security-index").indexTemplate(cit) - ).get() + ) ); // trigger index auto-creation final PutUserResponse putUserResponse = new PutUserRequestBuilder(client()).username("user") diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index fd38b40683f71..42b4c8c459eb0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -28,7 +28,6 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; @@ -53,7 +52,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.env.NodeMetadata; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.HttpServerTransport; @@ -61,7 +59,6 @@ import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; import org.elasticsearch.http.netty4.internal.HttpValidator; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.ingest.Processor; @@ -81,7 +78,6 @@ import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.plugins.interceptor.RestServerActionPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; @@ -90,7 +86,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.internal.ShardSearchRequest; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -624,33 +619,18 @@ protected XPackLicenseState getLicenseState() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<Object> createComponents(PluginServices services) { try { return createComponents( - client, - threadPool, - clusterService, - resourceWatcherService, - scriptService, - xContentRegistry, - environment, - nodeEnvironment.nodeMetadata(), - expressionResolver + services.client(), +
services.threadPool(), + services.clusterService(), + services.resourceWatcherService(), + services.scriptService(), + services.xContentRegistry(), + services.environment(), + services.nodeEnvironment().nodeMetadata(), + services.indexNameExpressionResolver() ); } catch (final Exception e) { throw new IllegalStateException("security initialization failed", e); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index a33c45adf814e..8942be0bee29c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -75,7 +75,7 @@ final class ElasticServiceAccounts { ) .privileges("write", "create_index", "auto_configure") .build(), - RoleDescriptor.IndicesPrivileges.builder().indices("profiling-*").privileges("read", "write", "auto_configure").build(), + RoleDescriptor.IndicesPrivileges.builder().indices("profiling-*").privileges("read", "write").build(), RoleDescriptor.IndicesPrivileges.builder() // APM Server (and hence Fleet Server, which issues its API Keys) needs additional privileges // for the non-sensitive "sampled traces" data stream: diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java index f75d6adc838a2..2f5f809702ccd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/operator/DefaultOperatorOnlyRegistry.java @@ -41,11 +41,8 @@ public class DefaultOperatorOnlyRegistry implements OperatorOnlyRegistry { // Autoscaling does not publish its actions to core, literal strings are needed. "cluster:admin/autoscaling/put_autoscaling_policy", "cluster:admin/autoscaling/delete_autoscaling_policy", - // Repository analysis actions are not mentioned in core, literal strings are needed. + // Repository analysis is not mentioned in core, a literal string is needed. 
"cluster:admin/repository/analyze", - "cluster:admin/repository/analyze/blob", - "cluster:admin/repository/analyze/blob/read", - "cluster:admin/repository/analyze/register", // Node shutdown APIs are operator only "cluster:admin/shutdown/create", "cluster:admin/shutdown/get", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 88725e015e511..66790c9898230 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.indices.TestIndexNameExpressionResolver; @@ -402,7 +403,7 @@ public void testJoinValidatorOnDisabledSecurity() throws Exception { public void testJoinValidatorForFIPSOnAllowedLicense() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.builder("foo") - .version(VersionUtils.randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomVersion()) + .version(VersionUtils.randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomVersion()) .build(); Metadata.Builder builder = Metadata.builder(); License license = TestUtils.generateSignedLicense( @@ -427,7 +428,7 @@ public void testJoinValidatorForFIPSOnAllowedLicense() throws Exception { public void testJoinValidatorForFIPSOnForbiddenLicense() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.builder("foo") - .version(VersionUtils.randomVersion(random()), IndexVersion.ZERO, IndexVersionUtils.randomVersion()) + .version(VersionUtils.randomVersion(random()), IndexVersions.ZERO, IndexVersionUtils.randomVersion()) .build(); Metadata.Builder builder = Metadata.builder(); final String forbiddenLicenseType = randomFrom( diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 18192e29869e3..fc56061a98883 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; @@ -99,7 +100,7 @@ public void init() throws Exception { .add(DiscoveryNodeUtils.create("id1")) .add( DiscoveryNodeUtils.builder("id2") - .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersion.MINIMUM_COMPATIBLE, IndexVersion.current()) + .version(Version.CURRENT.minimumCompatibilityVersion(), IndexVersions.MINIMUM_COMPATIBLE, IndexVersion.current()) .build() ) .build(); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index 2c65a06a486a2..08cfdde03815d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -211,7 +211,7 @@ public void testElasticFleetServerPrivileges() { final IndexAbstraction profilingIndex = mockIndexAbstraction("profiling-" + randomAlphaOfLengthBetween(1, 20)); assertThat(role.indices().allowedIndicesMatcher(AutoPutMappingAction.NAME).test(profilingIndex), is(true)); - assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(profilingIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(DeleteAction.NAME).test(profilingIndex), is(true)); assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(profilingIndex), is(false)); assertThat(role.indices().allowedIndicesMatcher(IndexAction.NAME).test(profilingIndex), is(true)); diff --git a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java index b9b324b8e0b63..c004eaf58939b 100644 --- a/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java +++ b/x-pack/plugin/shutdown/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownIT.java @@ -15,7 +15,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Nullable; -import org.elasticsearch.test.readiness.ReadinessClientProbe; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.XContentBuilder; @@ -38,7 +37,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class NodeShutdownIT extends ESRestTestCase implements ReadinessClientProbe { +public class NodeShutdownIT extends ESRestTestCase { public void testRestartCRUD() throws Exception { checkCRUD(randomFrom("restart", "RESTART"), randomPositiveTimeValue(), null, null); @@ -98,55 +97,6 @@ public void checkCRUD( } } - @SuppressWarnings("unchecked") - public void testShutdownReadinessService() throws Exception { - // Get a node from the cluster and find its readiness port - Request getNodes = new Request("GET", "_nodes"); - Map<String, Object> nodesResponse = responseAsMap(client().performRequest(getNodes)); - Map<String, Object> nodesObject = (Map<String, Object>) nodesResponse.get("nodes"); - - String nodeId = nodesObject.keySet().iterator().next(); - Map<String, Object> nodeObject = (Map<String, Object>) nodesObject.get(nodeId); - Map<String, Object> httpObject = (Map<String, Object>) nodeObject.get("http"); - String publishAddress = (String) httpObject.get("publish_address"); - - String readinessPorts = this.getTestReadinessPorts(); - String restPorts = this.getTestRestCluster(); - - String[] restAddresses = restPorts.split(","); - int nodeIndex = 0; - for (String restAddress : restAddresses) { - // skip ipv6 if any - if (restAddress.startsWith("[")) { - continue; - } - if
(restAddress.equals(publishAddress)) { - break; - } - nodeIndex++; - } - - String[] readinessAddresses = readinessPorts.split(","); - String readinessAddress = readinessAddresses[nodeIndex]; - - String portStr = readinessAddress.substring(readinessAddress.lastIndexOf(':') + 1); - Integer port = Integer.parseInt(portStr); - - // Once we have the right port, check to see if it's ready, has to be for a properly started cluster - tcpReadinessProbeTrue(port); - - // Mark the node for shutdown and check that it's not ready - checkCRUD(nodeId, randomFrom("restart", "RESTART"), "1ms", null, false, null); - tcpReadinessProbeFalse(port); - - // Delete the shutdown request and verify that the node is ready again - Request deleteRequest = new Request("DELETE", "_nodes/" + nodeId + "/shutdown"); - assertOK(client().performRequest(deleteRequest)); - assertNoShuttingDownNodes(nodeId); - - tcpReadinessProbeTrue(port); - } - public void testPutShutdownIsIdempotentForRestart() throws Exception { checkPutShutdownIdempotency("RESTART"); } @@ -269,7 +219,6 @@ public void testAllocationPreventedForRemoval() throws Exception { * 2) Ensures the status properly comes to rest at COMPLETE after the shards have moved. */ @SuppressWarnings("unchecked") - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/77488") public void testShardsMoveOffRemovingNode() throws Exception { String nodeIdToShutdown = getRandomNodeId(); diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java new file mode 100644 index 0000000000000..87eaf4d37ae00 --- /dev/null +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownReadinessIT.java @@ -0,0 +1,104 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.shutdown; + +import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; +import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.readiness.MockReadinessService; +import org.elasticsearch.readiness.ReadinessService; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.readiness.MockReadinessService.tcpReadinessProbeFalse; +import static org.elasticsearch.readiness.MockReadinessService.tcpReadinessProbeTrue; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.empty; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) +public class NodeShutdownReadinessIT extends ESIntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> getMockPlugins() { + final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.getMockPlugins()); + plugins.add(MockReadinessService.TestPlugin.class); + return Collections.unmodifiableList(plugins); + } + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return List.of(ShutdownPlugin.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + Settings.Builder settings = Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(Settings.builder().put(ReadinessService.PORT.getKey(), 0).build()); + return settings.build(); + } + + private void putNodeShutdown(String nodeId, SingleNodeShutdownMetadata.Type type, TimeValue allocationDelay) { + assertAcked( + client().execute( + PutShutdownNodeAction.INSTANCE, + new PutShutdownNodeAction.Request(nodeId, type, this.getTestName(), allocationDelay, null, null) + ) + ); + } + + private void deleteNodeShutdown(String nodeId) { + assertAcked(client().execute(DeleteShutdownNodeAction.INSTANCE, new DeleteShutdownNodeAction.Request(nodeId))); + } + + private String getNodeId(String nodeName) { + NodesInfoResponse nodes = clusterAdmin().prepareNodesInfo().clear().get(); + return nodes.getNodes() + .stream() + .map(NodeInfo::getNode) + .filter(node -> node.getName().equals(nodeName)) + .map(DiscoveryNode::getId) + .findFirst() + .orElseThrow(); + } + + private void assertNoShuttingDownNodes(String nodeId) throws ExecutionException, InterruptedException { + var response = client().execute(GetShutdownStatusAction.INSTANCE, new GetShutdownStatusAction.Request(nodeId)).get(); + assertThat(response.getShutdownStatuses(), empty()); + } + + public void testShutdownReadinessService() throws Exception { + + final String nodeName = internalCluster().startMasterOnlyNode(); + final String nodeId = getNodeId(nodeName); + + final var readinessService = internalCluster().getInstance(ReadinessService.class, nodeName); + + // Check that the node is initially ready, as it must be for a properly started cluster + tcpReadinessProbeTrue(readinessService); + + // Mark the node for shutdown and check that it's not ready + putNodeShutdown(nodeId, SingleNodeShutdownMetadata.Type.RESTART, TimeValue.timeValueMinutes(1)); +
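The new test above replaces the removed REST-level check in NodeShutdownIT: readiness is now probed through MockReadinessService inside the cluster rather than by connecting to the readiness TCP port from outside. For orientation, a minimal sketch of what such a TCP probe amounts to, assuming the readiness socket simply accepts connections while the node is ready; ReadinessProbe, the host/port arguments, and the timeout are illustrative placeholders, not Elasticsearch API:

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public final class ReadinessProbe {

    /** Returns true if something is listening on the readiness port, i.e. the node reports ready. */
    public static boolean isReady(String host, int port) {
        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(host, port), 1_000); // 1s connect timeout
            return true;  // connection accepted: the node is ready
        } catch (IOException e) {
            return false; // refused or timed out: not ready
        }
    }
}
```

The assertions around this point then trace the expected lifecycle: ready, marked for shutdown and not ready, shutdown record deleted and ready again.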
tcpReadinessProbeFalse(readinessService); + + // Delete the shutdown request and verify that the node is ready again + deleteNodeShutdown(nodeId); + assertNoShuttingDownNodes(nodeId); + + tcpReadinessProbeTrue(readinessService); + } +} diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java index ec5b25b21da32..4e2b1bd6c5a58 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownShardsIT.java @@ -461,7 +461,7 @@ private void putNodeShutdown(String nodeId, SingleNodeShutdownMetadata.Type type client().execute( PutShutdownNodeAction.INSTANCE, new PutShutdownNodeAction.Request(nodeId, type, this.getTestName(), null, nodeReplacementName, null) - ).get() + ) ); } diff --git a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java index 6c5e1e6af69ce..15e16d2a86910 100644 --- a/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java +++ b/x-pack/plugin/shutdown/src/internalClusterTest/java/org/elasticsearch/xpack/shutdown/NodeShutdownTasksIT.java @@ -21,15 +21,11 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.SettingsModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.persistent.AllocatedPersistentTask; import org.elasticsearch.persistent.PersistentTaskParams; import org.elasticsearch.persistent.PersistentTaskState; @@ -38,12 +34,8 @@ import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -57,7 +49,6 @@ import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.Supplier; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; @@ -134,23 +125,8 @@ public static class TaskPlugin extends Plugin implements PersistentTaskPlugin { TaskExecutor taskExecutor; @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - taskExecutor = new TaskExecutor(client, clusterService, threadPool); + public Collection<?> createComponents(PluginServices services) { + taskExecutor = new TaskExecutor(services.client(), services.clusterService(), services.threadPool()); return Collections.singletonList(taskExecutor); } diff --git a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java index 754ba7970eaa2..8c85bdb11dfa2 100644 --- a/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java +++ b/x-pack/plugin/shutdown/src/main/java/org/elasticsearch/xpack/shutdown/ShutdownPlugin.java @@ -9,29 +9,16 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Arrays; import java.util.Collection; @@ -41,24 +28,9 @@ public class ShutdownPlugin extends Plugin implements ActionPlugin { @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { - NodeSeenService nodeSeenService = new NodeSeenService(clusterService); + NodeSeenService nodeSeenService = new NodeSeenService(services.clusterService());
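The TaskPlugin and ShutdownPlugin hunks above are instances of the same mechanical migration: the fourteen-parameter createComponents overload is collapsed into a single PluginServices holder, and each plugin pulls out only the services it actually needs. A hedged sketch of the resulting shape, assuming PluginServices is the nested services interface these plugins reference unqualified; MyPlugin and MyService are placeholders, while the accessor names are the ones used in this diff:

```java
import java.util.Collection;
import java.util.List;

import org.elasticsearch.client.internal.Client;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.threadpool.ThreadPool;

public class MyPlugin extends Plugin {

    // Placeholder component holding the three services this plugin depends on.
    record MyService(Client client, ClusterService clusterService, ThreadPool threadPool) {}

    @Override
    public Collection<?> createComponents(PluginServices services) {
        // No positional parameter list: ask the services holder for each dependency.
        return List.of(new MyService(services.client(), services.clusterService(), services.threadPool()));
    }
}
```

The payoff is visible in the import hunks: each migrated plugin drops the imports it only carried to satisfy the old signature.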
return Collections.singletonList(nodeSeenService); } diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle index 999f6e6a79c7c..b02fe7cd44fbd 100644 --- a/x-pack/plugin/slm/qa/multi-node/build.gradle +++ b/x-pack/plugin/slm/qa/multi-node/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.internal.info.BuildParams +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' @@ -12,14 +13,14 @@ File repoDir = file("$buildDir/testclusters/repo") tasks.named("javaRestTest").configure { /* To support taking index snapshots, we have to set path.repo setting */ - systemProperty 'tests.path.repo', repoDir + nonInputProperties.systemProperty 'tests.path.repo', repoDir } testClusters.configureEach { testDistribution = 'DEFAULT' numberOfNodes = 4 - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' setting 'xpack.security.enabled', 'false' diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index b17fbb6a0371f..6699097e847a1 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -271,9 +271,9 @@ public void testRetentionWhileSnapshotInProgress() throws Exception { // Assert that the history document has been written for taking the snapshot and deleting it assertBusy(() -> { - SearchResponse resp = client().prepareSearch(".slm-history*") - .setQuery(QueryBuilders.matchQuery("snapshot_name", completedSnapshotName)) - .get(); + SearchResponse resp = prepareSearch(".slm-history*").setQuery( + QueryBuilders.matchQuery("snapshot_name", completedSnapshotName) + ).get(); logger.info( "--> checking history written for {}, got: {}", completedSnapshotName, diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java index 20f6f2d57571d..bd2d040d76299 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.repositories.GetSnapshotInfoContext; @@ -125,7 +126,12 @@ protected void doExecute(Task task, Request request, ActionListener li perRepositoryListener -> SubscribableListener // Get repository data - .newForked(repository::getRepositoryData) + .newForked( + l -> repository.getRepositoryData( + EsExecutors.DIRECT_EXECUTOR_SERVICE, // TODO use retentionExecutor, see #101445? 
+ l + ) + ) // Collect snapshot details by policy, and get any missing details by reading SnapshotInfo .andThen( diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java index 3664b4efbcf9a..74094c83d4bcb 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/SnapshotLifecycle.java @@ -15,31 +15,22 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.health.HealthIndicatorService; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.reservedstate.ReservedClusterStateHandler; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.XPackPlugin; @@ -83,6 +74,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -123,22 +115,10 @@ protected XPackLicenseState getLicenseState() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); final List<Object> components = new ArrayList<>(); SnapshotLifecycleTemplateRegistry templateRegistry = new SnapshotLifecycleTemplateRegistry( @@ -146,7 +126,7 @@ public Collection<Object> createComponents( clusterService, threadPool, client, - xContentRegistry + services.xContentRegistry() ); templateRegistry.initialize();
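The SLMGetExpiredSnapshotsAction hunk above reflects a signature change: Repository#getRepositoryData now takes the executor on which to complete its listener, and this caller passes EsExecutors.DIRECT_EXECUTOR_SERVICE (run on the calling thread) until the TODO about a dedicated retention executor is resolved. A fragment sketch of the resulting listener chain, reusing only names that appear in this diff rather than a standalone program:

```java
SubscribableListener

    // Fork the async fetch; the listener l completes via DIRECT_EXECUTOR_SERVICE,
    // i.e. on whichever thread delivers the repository data.
    .<RepositoryData>newForked(l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l))

    // Continue with the repository data once it arrives.
    .andThen((l, repositoryData) -> SLMGetExpiredSnapshotsAction.getSnapshotDetailsByPolicy(repository, repositoryData, l));
```

The same two-argument call shows up in the unit test below, which is also why the mocked repository now reads the listener from argument index 1 instead of 0.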
snapshotHistoryStore.set(new SnapshotHistoryStore(new OriginSettingClient(client, INDEX_LIFECYCLE_ORIGIN), clusterService)); @@ -167,7 +147,7 @@ public Collection createComponents( ) ); snapshotRetentionService.get().init(clusterService); - components.addAll(Arrays.asList(snapshotLifecycleService.get(), snapshotHistoryStore.get(), snapshotRetentionService.get())); + Collections.addAll(components, snapshotLifecycleService.get(), snapshotHistoryStore.get(), snapshotRetentionService.get()); slmHealthIndicatorService.set(new SlmHealthIndicatorService(clusterService)); return components; diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java index 5dd48946c270c..c876bb83f919d 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/SLMGetExpiredSnapshotsActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; +import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexVersion; @@ -176,7 +177,7 @@ record SeenSnapshotInfo(SnapshotId snapshotId, String policyId) {} SubscribableListener - .newForked(repository::getRepositoryData) + .newForked(l -> repository.getRepositoryData(EsExecutors.DIRECT_EXECUTOR_SERVICE, l)) .andThen( (l, rd) -> SLMGetExpiredSnapshotsAction.getSnapshotDetailsByPolicy(repository, rd, l) @@ -257,7 +258,7 @@ private static RepositoryData.SnapshotDetails mkDetails(String policyId) { private static Repository createMockRepository(ThreadPool threadPool, List snapshotInfos) { final var repository = mock(Repository.class); doAnswer(invocation -> { - final ActionListener listener = invocation.getArgument(0); + final ActionListener listener = invocation.getArgument(1); threadPool.generic().execute(ActionRunnable.supply(listener, () -> { var repositoryData = RepositoryData.EMPTY; for (SnapshotInfo snapshotInfo : snapshotInfos) { @@ -282,7 +283,7 @@ private static Repository createMockRepository(ThreadPool threadPool, List { final GetSnapshotInfoContext getSnapshotInfoContext = invocation.getArgument(0); diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/fs/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/fs/build.gradle index 0c0c1930f8601..d116a2d11e22d 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/fs/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/fs/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE + apply plugin: 'elasticsearch.legacy-java-rest-test' apply plugin: 'elasticsearch.rest-resources' @@ -25,7 +27,7 @@ tasks.withType(Test).configureEach { doFirst { delete(repoDir) } - systemProperty 'tests.path.repo', repoDir + nonInputProperties.systemProperty 'tests.path.repo', repoDir } testClusters.matching { it.name == "javaRestTest" }.configureEach { @@ -33,6 +35,6 @@ testClusters.matching { it.name == "javaRestTest" }.configureEach { numberOfNodes = 3 setting 'xpack.license.self_generated.type', 'trial' - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.security.enabled', 'false' } diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/license-enforcing/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/license-enforcing/build.gradle index 1c9a877507791..d2121daa1ba93 100644 --- a/x-pack/plugin/snapshot-based-recoveries/qa/license-enforcing/build.gradle +++ b/x-pack/plugin/snapshot-based-recoveries/qa/license-enforcing/build.gradle @@ -27,7 +27,7 @@ tasks.withType(Test).configureEach { doFirst { delete(repoDir) } - systemProperty 'tests.path.repo', repoDir + nonInputProperties.systemProperty 'tests.path.repo', repoDir } testClusters.matching { it.name == "javaRestTest" }.configureEach { @@ -37,6 +37,6 @@ testClusters.matching { it.name == "javaRestTest" }.configureEach { // This project tests that enterprise licensing is enforced, // therefore we use a basic license setting 'xpack.license.self_generated.type', 'basic' - setting 'path.repo', repoDir.absolutePath + setting 'path.repo', repoDir.absolutePath, IGNORE_VALUE setting 'xpack.security.enabled', 'false' } diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index e0ea9d4ff076c..c37cc7b092aba 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -317,19 +317,11 @@ public void testPeerRecoveryUsesSnapshots() throws Exception { String targetNode = internalCluster().startDataOnlyNode(); - MockTransportService sourceMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - sourceNode - ); - MockTransportService targetMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - targetNode - ); - - sourceMockTransportService.addSendBehavior(targetMockTransportService, (connection, requestId, action, request, options) -> { - assertNotEquals(PeerRecoveryTargetService.Actions.FILE_CHUNK, action); - connection.sendRequest(requestId, action, request, options); - }); + MockTransportService.getInstance(sourceNode) + .addSendBehavior(MockTransportService.getInstance(targetNode), (connection, requestId, action, request, options) -> { + assertNotEquals(PeerRecoveryTargetService.Actions.FILE_CHUNK, action); + connection.sendRequest(requestId, action, request, options); + }); 
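The SnapshotBasedIndexRecoveryIT hunks above and below all apply one refactor: the lookup-and-cast idiom for obtaining a node's MockTransportService is replaced by the static MockTransportService.getInstance(nodeName) helper. In isolation, as a fragment sketch assembled from lines in this diff:

```java
// Before: fetch the TransportService for the node and cast it.
// MockTransportService sourceMockTransportService =
//     (MockTransportService) internalCluster().getInstance(TransportService.class, sourceNode);

// After: one static helper call per node.
MockTransportService.getInstance(sourceNode)
    .addSendBehavior(MockTransportService.getInstance(targetNode), (connection, requestId, action, request, options) -> {
        // The recovery under test must never fall back to copying file chunks.
        assertNotEquals(PeerRecoveryTargetService.Actions.FILE_CHUNK, action);
        connection.sendRequest(requestId, action, request, options); // forward everything else untouched
    });
```

The behavior is unchanged; the helper simply removes the repeated cast and shortens each test's setup.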
updateIndexSettings(Settings.builder().put("index.routing.allocation.require._name", targetNode), indexName); @@ -597,23 +589,19 @@ public void testRecoveryIsCancelledAfterDeletingTheIndex() throws Exception { targetNode = internalCluster().startDataOnlyNode(); } - MockTransportService targetMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - targetNode - ); - CountDownLatch recoverSnapshotFileRequestReceived = new CountDownLatch(1); CountDownLatch respondToRecoverSnapshotFile = new CountDownLatch(1); AtomicInteger numberOfRecoverSnapshotFileRequestsReceived = new AtomicInteger(); - targetMockTransportService.addRequestHandlingBehavior( - PeerRecoveryTargetService.Actions.RESTORE_FILE_FROM_SNAPSHOT, - (handler, request, channel, task) -> { - assertThat(numberOfRecoverSnapshotFileRequestsReceived.incrementAndGet(), is(equalTo(1))); - recoverSnapshotFileRequestReceived.countDown(); - respondToRecoverSnapshotFile.await(); - handler.messageReceived(request, channel, task); - } - ); + MockTransportService.getInstance(targetNode) + .addRequestHandlingBehavior( + PeerRecoveryTargetService.Actions.RESTORE_FILE_FROM_SNAPSHOT, + (handler, request, channel, task) -> { + assertThat(numberOfRecoverSnapshotFileRequestsReceived.incrementAndGet(), is(equalTo(1))); + recoverSnapshotFileRequestReceived.countDown(); + respondToRecoverSnapshotFile.await(); + handler.messageReceived(request, channel, task); + } + ); if (seqNoRecovery) { ClusterState clusterState = clusterAdmin().prepareState().get().getState(); @@ -728,7 +716,7 @@ public void sendResponse(Exception exception) throws IOException { try { channel.sendResponse(exception); } catch (IOException e) { - throw new AssertionError("unexpected", e); + fail(e); } }); } @@ -1257,10 +1245,7 @@ public void testRecoveryRetryKeepsTheGrantedSnapshotFileDownloadPermit() throws recoverySnapshotFileRequests, awaitForRecoverSnapshotFileRequestReceived, respondToRecoverSnapshotFile) -> { - MockTransportService sourceMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - sourceNode - ); + final var sourceMockTransportService = MockTransportService.getInstance(sourceNode); CountDownLatch startRecoveryRetryReceived = new CountDownLatch(1); AtomicBoolean delayRecoveryExceptionSent = new AtomicBoolean(); @@ -1363,15 +1348,8 @@ public void testNodeDisconnectsDoNotOverAccountRecoveredBytes() throws Exception .findFirst() .orElseThrow(); - MockTransportService sourceMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - replicaNodeName - ); - - MockTransportService targetMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - newReplicaNodeName - ); + final var sourceMockTransportService = MockTransportService.getInstance(replicaNodeName); + final var targetMockTransportService = MockTransportService.getInstance(newReplicaNodeName); final CountDownLatch firstDownloadStartLatch = new CountDownLatch(1); final CountDownLatch blockSnapshotFileDownload = new CountDownLatch(1); @@ -1479,10 +1457,7 @@ private void executeRecoveryWithSnapshotFileDownloadThrottled(SnapshotBasedRecov String sourceNode = dataNodes.get(0); String targetNode = dataNodes.get(1); - MockTransportService targetMockTransportService = (MockTransportService) internalCluster().getInstance( - TransportService.class, - targetNode - ); + final var targetMockTransportService = 
MockTransportService.getInstance(targetNode); List recoverySnapshotFileRequests = Collections.synchronizedList(new ArrayList<>()); CountDownLatch recoverSnapshotFileRequestReceived = new CountDownLatch(1); @@ -1632,9 +1607,7 @@ private void indexDocs(String indexName, int docIdOffset, int docCount) throws E private void assertDocumentsAreEqual(String indexName, int docCount) { assertDocCount(indexName, docCount); for (int testCase = 0; testCase < 3; testCase++) { - final SearchRequestBuilder searchRequestBuilder = client().prepareSearch(indexName) - .addSort("field", SortOrder.ASC) - .setSize(10_000); + final SearchRequestBuilder searchRequestBuilder = prepareSearch(indexName).addSort("field", SortOrder.ASC).setSize(10_000); SearchResponse searchResponse; switch (testCase) { diff --git a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java index 8a00c453382fb..45c7eb1b997b8 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/test/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/plan/SnapshotsRecoveryPlannerServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot; import org.elasticsearch.index.store.Store; @@ -219,7 +220,7 @@ public void testLogicallyEquivalentSnapshotIsUsedEvenIfFilesAreDifferent() throw if (snapshotVersion == null) { luceneVersion = randomVersionBetween( random(), - IndexVersion.V_7_0_0, + IndexVersions.V_7_0_0, RecoverySettings.SNAPSHOT_RECOVERIES_SUPPORTED_INDEX_VERSION ).luceneVersion(); } else { @@ -410,7 +411,7 @@ public void fetchLatestSnapshotsForShard(ShardId shardId, ActionListener RegisterAnalyzeAction.bytesFromLong(RegisterAnalyzeAction.longFromBytes(bytes) + 1)); + register.updateAndGet(bytes -> bytesFromLong(longFromBytes(bytes) + 1)); } return register.compareAndExchange(expected, updated); } @@ -317,11 +322,11 @@ public void testFailsIfRegisterHoldsSpuriousValue() { final long expectedMax = Math.max(request.getConcurrency(), internalCluster().getNodeNames().length); blobStore.setDisruption(new Disruption() { @Override - public BytesReference onCompareAndExchange(BytesRegister register, BytesReference expected, BytesReference updated) { + public BytesReference onContendedCompareAndExchange(BytesRegister register, BytesReference expected, BytesReference updated) { if (randomBoolean() && sawSpuriousValue.compareAndSet(false, true)) { - final var currentValue = RegisterAnalyzeAction.longFromBytes(register.get()); + final var currentValue = longFromBytes(register.get()); if (currentValue == expectedMax) { - return RegisterAnalyzeAction.bytesFromLong( + return bytesFromLong( randomFrom( randomLongBetween(0L, expectedMax - 1), randomLongBetween(expectedMax + 1, Long.MAX_VALUE), @@ -329,7 +334,7 @@ public BytesReference onCompareAndExchange(BytesRegister register, BytesReferenc ) ); } else { - return RegisterAnalyzeAction.bytesFromLong( + return 
bytesFromLong( randomFrom(expectedMax, randomLongBetween(expectedMax, Long.MAX_VALUE), randomLongBetween(Long.MIN_VALUE, -1)) ); } @@ -347,8 +352,47 @@ public BytesReference onCompareAndExchange(BytesRegister register, BytesReferenc } } - private RepositoryAnalyzeAction.Response analyseRepository(RepositoryAnalyzeAction.Request request) { - return client().execute(RepositoryAnalyzeAction.INSTANCE, request).actionGet(30L, TimeUnit.SECONDS); + public void testTimesOutSpinningRegisterAnalysis() { + final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); + request.timeout(TimeValue.timeValueMillis(between(1, 1000))); + + blobStore.setDisruption(new Disruption() { + @Override + public boolean compareAndExchangeReturnsWitness(String key) { + // let uncontended accesses succeed but all contended ones fail + return isContendedRegisterKey(key) == false; + } + }); + final var exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertThat(exception.getMessage(), containsString("analysis failed")); + assertThat( + asInstanceOf(RepositoryVerificationException.class, exception.getCause()).getMessage(), + containsString("analysis timed out") + ); + } + + public void testFailsIfAllRegisterOperationsInconclusive() { + final RepositoryAnalyzeAction.Request request = new RepositoryAnalyzeAction.Request("test-repo"); + blobStore.setDisruption(new Disruption() { + @Override + public boolean compareAndExchangeReturnsWitness(String key) { + return false; + } + }); + final var exception = expectThrows(RepositoryVerificationException.class, () -> analyseRepository(request)); + assertThat(exception.getMessage(), containsString("analysis failed")); + assertThat( + asInstanceOf(RepositoryVerificationException.class, ExceptionsHelper.unwrapCause(exception.getCause())).getMessage(), + allOf(containsString("uncontended register operation failed"), containsString("did not observe any value")) + ); + } + + private void analyseRepository(RepositoryAnalyzeAction.Request request) { + client().execute(RepositoryAnalyzeAction.INSTANCE, request).actionGet(30L, TimeUnit.SECONDS); + } + + private static void assertPurpose(OperationPurpose purpose) { + assertEquals(OperationPurpose.REPOSITORY_ANALYSIS, purpose); } public static class TestPlugin extends Plugin implements RepositoryPlugin { @@ -422,7 +466,9 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) {} + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { + assertPurpose(purpose); + } private void deleteContainer(DisruptableBlobContainer container) { blobContainer = null; @@ -458,7 +504,11 @@ default boolean createBlobOnAbort() { return false; } - default BytesReference onCompareAndExchange(BytesRegister register, BytesReference expected, BytesReference updated) { + default boolean compareAndExchangeReturnsWitness(String key) { + return true; + } + + default BytesReference onContendedCompareAndExchange(BytesRegister register, BytesReference expected, BytesReference updated) { return register.compareAndExchange(expected, updated); } } @@ -484,11 +534,13 @@ public BlobPath path() { @Override public boolean blobExists(OperationPurpose purpose, String blobName) { + assertPurpose(purpose); return blobs.containsKey(blobName); } @Override public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + 
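From this point on, every BlobContainer override in these repository-analysis test doubles gains the same two edits: assert that the caller tagged the operation with OperationPurpose.REPOSITORY_ANALYSIS, and thread the caller's purpose through to delegate calls instead of hardcoding OperationPurpose.SNAPSHOT. The pattern in isolation, as a fragment sketch using only names from this diff:

```java
private static void assertPurpose(OperationPurpose purpose) {
    assertEquals(OperationPurpose.REPOSITORY_ANALYSIS, purpose); // fail fast on a mislabelled operation
}

@Override
public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists)
    throws IOException {
    assertPurpose(purpose);
    // Delegate with the same purpose; previously this hardcoded OperationPurpose.SNAPSHOT,
    // which would have masked a caller passing the wrong purpose.
    writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists);
}
```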
assertPurpose(purpose); final byte[] actualContents = blobs.get(blobName); final byte[] disruptedContents = disruption.onRead(actualContents, 0L, actualContents == null ? 0L : actualContents.length); if (disruptedContents == null) { @@ -499,6 +551,7 @@ public InputStream readBlob(OperationPurpose purpose, String blobName) throws IO @Override public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assertPurpose(purpose); final byte[] actualContents = blobs.get(blobName); final byte[] disruptedContents = disruption.onRead(actualContents, position, length); if (disruptedContents == null) { @@ -516,13 +569,15 @@ public void writeBlob( long blobSize, boolean failIfAlreadyExists ) throws IOException { + assertPurpose(purpose); writeBlobAtomic(blobName, inputStream, failIfAlreadyExists); } @Override public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(OperationPurpose.SNAPSHOT, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + assertPurpose(purpose); + writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @Override @@ -533,18 +588,20 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assertPurpose(purpose); final BytesStreamOutput out = new BytesStreamOutput(); writer.accept(out); if (atomic) { - writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); + writeBlobAtomic(purpose, blobName, out.bytes(), failIfAlreadyExists); } else { - writeBlob(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); + writeBlob(purpose, blobName, out.bytes(), failIfAlreadyExists); } } @Override public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assertPurpose(purpose); final StreamInput inputStream; try { inputStream = bytes.streamInput(); @@ -577,6 +634,7 @@ private void writeBlobAtomic(String blobName, InputStream inputStream, boolean f @Override public DeleteResult delete(OperationPurpose purpose) throws IOException { + assertPurpose(purpose); disruption.onDelete(); deleteContainer.accept(this); final DeleteResult deleteResult = new DeleteResult(blobs.size(), blobs.values().stream().mapToLong(b -> b.length).sum()); @@ -586,11 +644,13 @@ public DeleteResult delete(OperationPurpose purpose) throws IOException { @Override public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { + assertPurpose(purpose); blobNames.forEachRemaining(blobs.keySet()::remove); } @Override public Map listBlobs(OperationPurpose purpose) throws IOException { + assertPurpose(purpose); return disruption.onList( blobs.entrySet() .stream() @@ -600,12 +660,14 @@ public Map listBlobs(OperationPurpose purpose) throws IOEx @Override public Map children(OperationPurpose purpose) { + assertPurpose(purpose); return Map.of(); } @Override public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) throws IOException { - final Map blobMetadataByName = listBlobs(OperationPurpose.SNAPSHOT); + assertPurpose(purpose); + final Map blobMetadataByName = listBlobs(purpose); blobMetadataByName.keySet().removeIf(s -> s.startsWith(blobNamePrefix) == false); return blobMetadataByName; } @@ -618,9 +680,29 @@ public void compareAndExchangeRegister( BytesReference updated, ActionListener listener ) { - final var 
register = registers.computeIfAbsent(key, ignored -> new BytesRegister()); - listener.onResponse(OptionalBytesReference.of(disruption.onCompareAndExchange(register, expected, updated))); + assertPurpose(purpose); + final boolean isContendedRegister = isContendedRegisterKey(key); // validate key + if (disruption.compareAndExchangeReturnsWitness(key)) { + final var register = registers.computeIfAbsent(key, ignored -> new BytesRegister()); + if (isContendedRegister) { + listener.onResponse(OptionalBytesReference.of(disruption.onContendedCompareAndExchange(register, expected, updated))); + } else { + listener.onResponse(OptionalBytesReference.of(register.compareAndExchange(expected, updated))); + } + } else { + listener.onResponse(OptionalBytesReference.MISSING); + } + } + } + + static boolean isContendedRegisterKey(String key) { + if (key.startsWith(RepositoryAnalyzeAction.CONTENDED_REGISTER_NAME_PREFIX)) { + return true; + } + if (key.startsWith(RepositoryAnalyzeAction.UNCONTENDED_REGISTER_NAME_PREFIX)) { + return false; } + return fail(null, "unknown register: %s", key); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java index 45b1bdc756789..d6c793984736f 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/internalClusterTest/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalysisSuccessIT.java @@ -57,6 +57,7 @@ import java.util.function.Consumer; import java.util.stream.Collectors; +import static org.elasticsearch.repositories.blobstore.testkit.RepositoryAnalysisFailureIT.isContendedRegisterKey; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -164,6 +165,10 @@ private static BlobPath buildBlobPath(Settings settings) { } } + private static void assertPurpose(OperationPurpose purpose) { + assertEquals(OperationPurpose.REPOSITORY_ANALYSIS, purpose); + } + static class AssertingRepository extends BlobStoreRepository { private final AtomicReference blobStoreRef = new AtomicReference<>(); @@ -239,7 +244,9 @@ private void deleteContainer(AssertingBlobContainer container) { } @Override - public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) {} + public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { + assertPurpose(purpose); + } @Override public void close() {} @@ -299,11 +306,13 @@ public BlobPath path() { @Override public boolean blobExists(OperationPurpose purpose, String blobName) { + assertPurpose(purpose); return blobs.containsKey(blobName); } @Override public InputStream readBlob(OperationPurpose purpose, String blobName) throws IOException { + assertPurpose(purpose); final byte[] contents = blobs.get(blobName); if (contents == null) { throw new FileNotFoundException(blobName + " not found"); @@ -313,6 +322,7 @@ public InputStream readBlob(OperationPurpose purpose, String blobName) throws IO @Override public InputStream readBlob(OperationPurpose purpose, String blobName, long position, long length) throws IOException { + assertPurpose(purpose); final byte[] 
contents = blobs.get(blobName); if (contents == null) { throw new FileNotFoundException(blobName + " not found"); @@ -329,6 +339,7 @@ public void writeBlob( long blobSize, boolean failIfAlreadyExists ) throws IOException { + assertPurpose(purpose); assertTrue("must only write blob [" + blobName + "] non-atomically if it doesn't already exist", failIfAlreadyExists); assertNull("blob [" + blobName + "] must not exist", blobs.get(blobName)); @@ -339,7 +350,8 @@ public void writeBlob( @Override public void writeBlob(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { - writeBlob(OperationPurpose.SNAPSHOT, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); + assertPurpose(purpose); + writeBlob(purpose, blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @Override @@ -350,18 +362,20 @@ public void writeMetadataBlob( boolean atomic, CheckedConsumer writer ) throws IOException { + assertPurpose(purpose); final BytesStreamOutput out = new BytesStreamOutput(); writer.accept(out); if (atomic) { - writeBlobAtomic(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); + writeBlobAtomic(purpose, blobName, out.bytes(), failIfAlreadyExists); } else { - writeBlob(OperationPurpose.SNAPSHOT, blobName, out.bytes(), failIfAlreadyExists); + writeBlob(purpose, blobName, out.bytes(), failIfAlreadyExists); } } @Override public void writeBlobAtomic(OperationPurpose purpose, String blobName, BytesReference bytes, boolean failIfAlreadyExists) throws IOException { + assertPurpose(purpose); writeBlobAtomic(blobName, bytes.streamInput(), bytes.length(), failIfAlreadyExists); } @@ -391,6 +405,7 @@ private void writeBlobAtomic(String blobName, InputStream inputStream, long blob @Override public DeleteResult delete(OperationPurpose purpose) { + assertPurpose(purpose); deleteContainer.accept(this); final DeleteResult deleteResult = new DeleteResult(blobs.size(), blobs.values().stream().mapToLong(b -> b.length).sum()); blobs.clear(); @@ -399,11 +414,13 @@ public DeleteResult delete(OperationPurpose purpose) { @Override public void deleteBlobsIgnoringIfNotExists(OperationPurpose purpose, Iterator blobNames) { + assertPurpose(purpose); blobNames.forEachRemaining(blobs.keySet()::remove); } @Override public Map listBlobs(OperationPurpose purpose) { + assertPurpose(purpose); return blobs.entrySet() .stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> new BlobMetadata(e.getKey(), e.getValue().length))); @@ -411,26 +428,32 @@ public Map listBlobs(OperationPurpose purpose) { @Override public Map children(OperationPurpose purpose) { + assertPurpose(purpose); return Map.of(); } @Override public Map listBlobsByPrefix(OperationPurpose purpose, String blobNamePrefix) { - final Map blobMetadataByName = listBlobs(OperationPurpose.SNAPSHOT); + assertPurpose(purpose); + final Map blobMetadataByName = listBlobs(purpose); blobMetadataByName.keySet().removeIf(s -> s.startsWith(blobNamePrefix) == false); return blobMetadataByName; } @Override public void getRegister(OperationPurpose purpose, String key, ActionListener listener) { - if (firstRegisterRead.compareAndSet(true, false) && randomBoolean() && randomBoolean()) { + assertPurpose(purpose); + if (isContendedRegisterKey(key) && firstRegisterRead.compareAndSet(true, false) && randomBoolean() && randomBoolean()) { + // it's ok if _contended_ register accesses are a little disrupted since they retry until success, however, // only fail the first read, we must not fail the 
final check listener.onResponse(OptionalBytesReference.EMPTY); } else if (randomBoolean()) { + // read the register directly listener.onResponse(OptionalBytesReference.of(registers.computeIfAbsent(key, ignored -> new BytesRegister()).get())); } else { + // read using a compare-and-exchange that cannot succeed, but which returns the current value anyway final var bogus = randomFrom(BytesArray.EMPTY, new BytesArray(new byte[] { randomByte() })); - compareAndExchangeRegister(OperationPurpose.SNAPSHOT, key, bogus, bogus, listener); + compareAndExchangeRegister(purpose, key, bogus, bogus, listener); } } @@ -442,17 +465,23 @@ public void compareAndExchangeRegister( BytesReference updated, ActionListener listener ) { - firstRegisterRead.set(false); - if (updated.length() > 1 && randomBoolean() && randomBoolean()) { - // updated.length() > 1 so we don't fail the final check because we know there can be no concurrent operations at that point - listener.onResponse(OptionalBytesReference.MISSING); - } else { - listener.onResponse( - OptionalBytesReference.of( - registers.computeIfAbsent(key, ignored -> new BytesRegister()).compareAndExchange(expected, updated) - ) - ); + assertPurpose(purpose); + if (isContendedRegisterKey(key)) { + // it's ok if _contended_ register accesses are a little disrupted since they retry until success + + firstRegisterRead.set(false); + if (updated.length() > 1 && randomBoolean() && randomBoolean()) { + // updated.length() > 1 so the final check succeeds because we know there can be no concurrent operations at that point + listener.onResponse(OptionalBytesReference.MISSING); + return; + } } + + listener.onResponse( + OptionalBytesReference.of( + registers.computeIfAbsent(key, ignored -> new BytesRegister()).compareAndExchange(expected, updated) + ) + ); } } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java index 72adf752737fc..d9c85eb37aaa0 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/BlobAnalyzeAction.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.HandledTransportAction; @@ -26,7 +25,6 @@ import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -148,55 +146,38 @@ * unnecessary resources. 
* */ -public class BlobAnalyzeAction extends ActionType { +class BlobAnalyzeAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(BlobAnalyzeAction.class); - public static final BlobAnalyzeAction INSTANCE = new BlobAnalyzeAction(); - public static final String NAME = "cluster:admin/repository/analyze/blob"; + static final String NAME = "cluster:admin/repository/analyze/blob"; - private BlobAnalyzeAction() { - super(NAME, Response::new); - } - - public static class TransportAction extends HandledTransportAction { + private final RepositoriesService repositoriesService; + private final TransportService transportService; - private static final Logger logger = BlobAnalyzeAction.logger; - - private final RepositoriesService repositoriesService; - private final TransportService transportService; + BlobAnalyzeAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) { + super(NAME, transportService, actionFilters, Request::new, transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT)); + this.repositoriesService = repositoriesService; + this.transportService = transportService; + } - @Inject - public TransportAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) { - super( - NAME, - transportService, - actionFilters, - Request::new, - transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT) - ); - this.repositoriesService = repositoriesService; - this.transportService = transportService; + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + final Repository repository = repositoriesService.repository(request.getRepositoryName()); + if (repository instanceof BlobStoreRepository == false) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); } + if (repository.isReadOnly()) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); + } + final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; + final BlobPath path = blobStoreRepository.basePath().add(request.blobPath); + final BlobContainer blobContainer = blobStoreRepository.blobStore().blobContainer(path); - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - final Repository repository = repositoriesService.repository(request.getRepositoryName()); - if (repository instanceof BlobStoreRepository == false) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); - } - if (repository.isReadOnly()) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); - } - final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; - final BlobPath path = blobStoreRepository.basePath().add(request.blobPath); - final BlobContainer blobContainer = blobStoreRepository.blobStore().blobContainer(path); - - logger.trace("handling [{}]", request); + logger.trace("handling [{}]", request); - assert task instanceof CancellableTask; - new BlobAnalysis(transportService, (CancellableTask) task, request, blobStoreRepository, blobContainer, listener).run(); - } + assert task instanceof CancellableTask; + new BlobAnalysis(transportService, (CancellableTask) task, request, blobStoreRepository, blobContainer, listener).run(); } /** @@ -208,7 +189,7 
@@ protected void doExecute(Task task, Request request, ActionListener li /** * Analysis on a single blob, performing the write(s) and orchestrating the read(s). */ - static class BlobAnalysis { + private static class BlobAnalysis { private final TransportService transportService; private final CancellableTask task; private final BlobAnalyzeAction.Request request; @@ -348,18 +329,23 @@ public StreamInput streamInput() throws IOException { }; if (atomic) { try { - blobContainer.writeBlobAtomic(OperationPurpose.SNAPSHOT, request.blobName, bytesReference, failIfExists); + blobContainer.writeBlobAtomic( + OperationPurpose.REPOSITORY_ANALYSIS, + request.blobName, + bytesReference, + failIfExists + ); } catch (BlobWriteAbortedException e) { assert request.getAbortWrite() : "write unexpectedly aborted"; } } else { - blobContainer.writeBlob(OperationPurpose.SNAPSHOT, request.blobName, bytesReference, failIfExists); + blobContainer.writeBlob(OperationPurpose.REPOSITORY_ANALYSIS, request.blobName, bytesReference, failIfExists); } } else { cancellableThreads.execute(() -> { try { blobContainer.writeBlob( - OperationPurpose.SNAPSHOT, + OperationPurpose.REPOSITORY_ANALYSIS, request.blobName, repository.maybeRateLimitSnapshots( new RandomBlobContentStream(content, request.getTargetLength()), @@ -478,7 +464,7 @@ private void cleanUpAndReturnFailure(Exception exception) { logger.trace(() -> "analysis failed [" + request.getDescription() + "] cleaning up", exception); } try { - blobContainer.deleteBlobsIgnoringIfNotExists(OperationPurpose.SNAPSHOT, Iterators.single(request.blobName)); + blobContainer.deleteBlobsIgnoringIfNotExists(OperationPurpose.REPOSITORY_ANALYSIS, Iterators.single(request.blobName)); } catch (IOException ioException) { exception.addSuppressed(ioException); logger.warn( @@ -652,7 +638,7 @@ private WriteDetails(long bytesWritten, long elapsedNanos, long throttledNanos, } } - public static class Request extends ActionRequest { + static class Request extends ActionRequest { private final String repositoryName; private final String blobPath; private final String blobName; @@ -770,29 +756,29 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); } - public String getRepositoryName() { + String getRepositoryName() { return repositoryName; } - public String getBlobPath() { + String getBlobPath() { return blobPath; } - public String getBlobName() { + String getBlobName() { return blobName; } - public long getTargetLength() { + long getTargetLength() { return targetLength; } - public boolean getAbortWrite() { + boolean getAbortWrite() { return abortWrite; } } - public static class Response extends ActionResponse implements ToXContentObject { + static class Response extends ActionResponse implements ToXContentObject { private final String nodeId; private final String nodeName; @@ -808,7 +794,7 @@ public static class Response extends ActionResponse implements ToXContentObject private final long writeThrottledNanos; private final List readDetails; - public Response( + Response( String nodeId, String nodeName, String blobName, @@ -836,7 +822,7 @@ public Response( this.readDetails = readDetails; } - public Response(StreamInput in) throws IOException { + Response(StreamInput in) throws IOException { super(in); nodeId = in.readString(); nodeName = in.readString(); @@ -923,7 +909,7 @@ long getChecksumBytes() { } } - public static class ReadDetail implements Writeable, ToXContentFragment { 
+ static class ReadDetail implements Writeable, ToXContentFragment { private final String nodeId; private final String nodeName; @@ -933,7 +919,7 @@ public static class ReadDetail implements Writeable, ToXContentFragment { private final long throttleNanos; private final long elapsedNanos; - public ReadDetail( + ReadDetail( String nodeId, String nodeName, boolean beforeWriteComplete, @@ -951,7 +937,7 @@ public ReadDetail( this.elapsedNanos = elapsedNanos; } - public ReadDetail(StreamInput in) throws IOException { + ReadDetail(StreamInput in) throws IOException { nodeId = in.readString(); nodeName = in.readString(); beforeWriteComplete = in.readBoolean(); diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/ContendedRegisterAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/ContendedRegisterAnalyzeAction.java new file mode 100644 index 0000000000000..8058b270d310e --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/ContendedRegisterAnalyzeAction.java @@ -0,0 +1,286 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.OptionalBytesReference; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.ByteUtils; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.Executor; + +/** + * An action which atomically increments a register using {@link BlobContainer#compareAndExchangeRegister}. There will be multiple parties + * accessing the register concurrently in order to test behaviour under contention. 
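The javadoc above describes the heart of the new action: repeatedly compare-and-exchange the register from the last value we witnessed until our increment wins. The same retry-on-witness loop as a self-contained sketch, with java.util.concurrent.atomic.AtomicLong standing in for BlobContainer#compareAndExchangeRegister:

```java
import java.util.concurrent.atomic.AtomicLong;

public final class CasIncrement {

    /** Atomically increments the register by one, retrying under contention, and returns the new value. */
    public static long incrementOnce(AtomicLong register) {
        long current = register.get();
        while (true) {
            // compareAndExchange returns the witnessed value: the register's actual
            // content at the moment of the attempt, whether or not our update applied.
            long witness = register.compareAndExchange(current, current + 1);
            if (witness == current) {
                return current + 1; // our update won
            }
            current = witness; // someone else moved the register; retry from what we saw
        }
    }
}
```

The real action additionally re-enqueues each retry on the SNAPSHOT executor and treats an absent witness (OptionalBytesReference.MISSING) as "try again", since some stores cannot report the concurrently written value.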
+ */ +class ContendedRegisterAnalyzeAction extends HandledTransportAction<ContendedRegisterAnalyzeAction.Request, ActionResponse.Empty> { + + private static final Logger logger = LogManager.getLogger(ContendedRegisterAnalyzeAction.class); + + static final String NAME = "cluster:admin/repository/analyze/register"; + + private final RepositoriesService repositoriesService; + private final Executor executor; + + ContendedRegisterAnalyzeAction( + TransportService transportService, + ActionFilters actionFilters, + RepositoriesService repositoriesService + ) { + super(NAME, transportService, actionFilters, Request::new, transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT)); + this.repositoriesService = repositoriesService; + this.executor = transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<ActionResponse.Empty> outerListenerOld) { + final var outerListener = ActionListener.assertOnce(outerListenerOld); + final Repository repository = repositoriesService.repository(request.getRepositoryName()); + if (repository instanceof BlobStoreRepository == false) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); + } + if (repository.isReadOnly()) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); + } + final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; + final BlobPath path = blobStoreRepository.basePath().add(request.getContainerPath()); + final BlobContainer blobContainer = blobStoreRepository.blobStore().blobContainer(path); + + logger.trace("handling [{}]", request); + + assert task instanceof CancellableTask; + + final String registerName = request.getRegisterName(); + final ActionListener<OptionalBytesReference> initialValueListener = new ActionListener<>() { + @Override + public void onResponse(OptionalBytesReference maybeInitialBytes) { + final long initialValue = maybeInitialBytes.isPresent() ? longFromBytes(maybeInitialBytes.bytesReference()) : 0L; + + ActionListener.run(outerListener.map(ignored -> ActionResponse.Empty.INSTANCE), l -> { + if (initialValue < 0 || initialValue >= request.getRequestCount()) { + throw new IllegalStateException("register holds unexpected value [" + initialValue + "]"); + } + + class Execution extends ActionRunnable<Void> { + private long currentValue; + + private final ActionListener<OptionalBytesReference> witnessListener; + + Execution(long currentValue) { + super(l); + this.currentValue = currentValue; + this.witnessListener = listener.delegateFailure(this::handleWitness); + } + + @Override + protected void doRun() { + if (((CancellableTask) task).notifyIfCancelled(listener) == false) { + blobContainer.compareAndExchangeRegister( + OperationPurpose.REPOSITORY_ANALYSIS, + registerName, + bytesFromLong(currentValue), + bytesFromLong(currentValue + 1L), + witnessListener + ); + } + } + + private void handleWitness(ActionListener<Void> delegate, OptionalBytesReference witnessOrEmpty) { + if (witnessOrEmpty.isPresent() == false) { + // Concurrent activity prevented us from updating the value, or even reading the concurrently-updated + // result, so we must just try again.
+ executor.execute(Execution.this); + return; + } + + final long witness = longFromBytes(witnessOrEmpty.bytesReference()); + if (witness == currentValue) { + delegate.onResponse(null); + } else if (witness < currentValue || witness >= request.getRequestCount()) { + delegate.onFailure(new IllegalStateException("register holds unexpected value [" + witness + "]")); + } else { + currentValue = witness; + executor.execute(Execution.this); + } + } + + } + + new Execution(initialValue).run(); + + }); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof UnsupportedOperationException) { + // Registers are not supported on all repository types, and that's ok. If it's not supported here then the final + // check will also be unsupported, so it doesn't matter that we didn't do anything before this successful response. + outerListener.onResponse(ActionResponse.Empty.INSTANCE); + } else { + outerListener.onFailure(e); + } + } + }; + + if (request.getInitialRead() > request.getRequestCount()) { + blobContainer.getRegister(OperationPurpose.REPOSITORY_ANALYSIS, registerName, initialValueListener); + } else { + blobContainer.compareAndExchangeRegister( + OperationPurpose.REPOSITORY_ANALYSIS, + registerName, + bytesFromLong(request.getInitialRead()), + bytesFromLong( + request.getInitialRead() == request.getRequestCount() ? request.getRequestCount() + 1 : request.getInitialRead() + ), + initialValueListener + ); + } + } + + static class Request extends ActionRequest { + private final String repositoryName; + private final String containerPath; + private final String registerName; + private final int requestCount; + private final int initialRead; + + Request(String repositoryName, String containerPath, String registerName, int requestCount, int initialRead) { + this.repositoryName = repositoryName; + this.containerPath = containerPath; + this.registerName = registerName; + this.requestCount = requestCount; + this.initialRead = initialRead; + } + + Request(StreamInput in) throws IOException { + super(in); + assert in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); + repositoryName = in.readString(); + containerPath = in.readString(); + registerName = in.readString(); + requestCount = in.readVInt(); + initialRead = in.readVInt(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); + super.writeTo(out); + out.writeString(repositoryName); + out.writeString(containerPath); + out.writeString(registerName); + out.writeVInt(requestCount); + out.writeVInt(initialRead); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + String getRepositoryName() { + return repositoryName; + } + + String getContainerPath() { + return containerPath; + } + + String getRegisterName() { + return registerName; + } + + int getRequestCount() { + return requestCount; + } + + int getInitialRead() { + return initialRead; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); + } + + @Override + public String toString() { + return getDescription(); + } + + @Override + public String getDescription() { + return Strings.format( + """ + ContendedRegisterAnalyzeAction.Request{\ + repositoryName='%s', containerPath='%s', registerName='%s', requestCount='%d', initialRead='%d'}""", + repositoryName, + containerPath, + 
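// An illustrative, JDK-only aside (not part of the patch): a minimal sketch of the
// contended-increment protocol implemented above. Each worker CASes value -> value + 1;
// on a lost race it adopts the freshly witnessed value and retries, and any witness
// outside the window [currentValue, requestCount) means the register misbehaved.
// All names here are hypothetical.
import java.util.concurrent.atomic.AtomicLong;

class ContendedIncrementSketch {

    static void incrementOnce(AtomicLong register, long requestCount) {
        long currentValue = register.get(); // stands in for the initial read of the register
        while (true) {
            final long witness = register.compareAndExchange(currentValue, currentValue + 1L);
            if (witness == currentValue) {
                return; // our increment was applied
            }
            if (witness < currentValue || witness >= requestCount) {
                // a healthy register never goes backwards, nor past the number of participants
                throw new IllegalStateException("register holds unexpected value [" + witness + "]");
            }
            currentValue = witness; // lost the race: retry from the witnessed value
        }
    }

    public static void main(String[] args) throws InterruptedException {
        final int requestCount = 8;
        final AtomicLong register = new AtomicLong(0L);
        final Thread[] workers = new Thread[requestCount];
        for (int i = 0; i < requestCount; i++) {
            workers[i] = new Thread(() -> incrementOnce(register, requestCount));
            workers[i].start();
        }
        for (Thread worker : workers) {
            worker.join();
        }
        if (register.get() != requestCount) { // mirrors the final-value verification step
            throw new AssertionError("expected [" + requestCount + "] but found [" + register.get() + "]");
        }
    }
}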
registerName, + requestCount, + initialRead + ); + } + } + + static long longFromBytes(BytesReference bytesReference) { + if (bytesReference.length() == 0) { + return 0L; + } else if (bytesReference.length() == Long.BYTES) { + try (var baos = new ByteArrayOutputStream(Long.BYTES)) { + bytesReference.writeTo(baos); + final var bytes = baos.toByteArray(); + assert bytes.length == Long.BYTES; + return ByteUtils.readLongBE(bytes, 0); + } catch (IOException e) { + assert false : "no IO takes place"; + throw new IllegalStateException("unexpected conversion error", e); + } + } else { + throw new IllegalArgumentException("cannot read long from BytesReference of length " + bytesReference.length()); + } + } + + static BytesReference bytesFromLong(long value) { + if (value == 0L) { + return BytesArray.EMPTY; + } else { + final var bytes = new byte[Long.BYTES]; + ByteUtils.writeLongBE(value, bytes, 0); + return new BytesArray(bytes); + } + } +} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java index c27271e28130b..f706ff79bf073 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/GetBlobChecksumAction.java @@ -13,12 +13,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.OperationPurpose; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -46,158 +44,139 @@ * (possibly the entire blob) and compute its checksum. It is acceptable if the blob is not found but we do not accept the blob being * otherwise unreadable. 
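// An illustrative, JDK-only restatement of the register encoding used by the helpers
// above: an empty payload denotes zero (a register that has never been written reads
// as empty), and any other value is exactly eight big-endian bytes. Plain byte[]
// stands in for BytesReference; the method names mirror the helpers above but the
// class is otherwise hypothetical.
import java.nio.ByteBuffer;

final class RegisterEncodingSketch {

    static byte[] bytesFromLong(long value) {
        if (value == 0L) {
            return new byte[0]; // zero is represented by the absent/empty register
        }
        return ByteBuffer.allocate(Long.BYTES).putLong(value).array(); // ByteBuffer defaults to big-endian
    }

    static long longFromBytes(byte[] bytes) {
        if (bytes.length == 0) {
            return 0L;
        } else if (bytes.length == Long.BYTES) {
            return ByteBuffer.wrap(bytes).getLong();
        } else {
            throw new IllegalArgumentException("cannot read long from [" + bytes.length + "] bytes");
        }
    }

    public static void main(String[] args) {
        for (long value : new long[] { 0L, 1L, 42L, Long.MAX_VALUE }) {
            if (longFromBytes(bytesFromLong(value)) != value) {
                throw new AssertionError("round-trip failed for [" + value + "]");
            }
        }
    }
}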
*/ -public class GetBlobChecksumAction extends ActionType { +class GetBlobChecksumAction extends HandledTransportAction { private static final Logger logger = LogManager.getLogger(GetBlobChecksumAction.class); - public static final GetBlobChecksumAction INSTANCE = new GetBlobChecksumAction(); + static final String NAME = "cluster:admin/repository/analyze/blob/read"; - public static final String NAME = "cluster:admin/repository/analyze/blob/read"; + private static final int BUFFER_SIZE = ByteSizeUnit.KB.toIntBytes(8); - private GetBlobChecksumAction() { - super(NAME, Response::new); - } - - public static class TransportAction extends HandledTransportAction { + private final RepositoriesService repositoriesService; - private static final Logger logger = GetBlobChecksumAction.logger; + GetBlobChecksumAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) { + super(NAME, transportService, actionFilters, Request::new, transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT)); + this.repositoriesService = repositoriesService; + } - private static final int BUFFER_SIZE = ByteSizeUnit.KB.toIntBytes(8); + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { - private final RepositoriesService repositoriesService; + assert task instanceof CancellableTask; + CancellableTask cancellableTask = (CancellableTask) task; - @Inject - public TransportAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) { - super( - NAME, - transportService, - actionFilters, - Request::new, - transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT) - ); - this.repositoriesService = repositoriesService; + final Repository repository = repositoriesService.repository(request.getRepositoryName()); + if (repository instanceof BlobStoreRepository == false) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob store repository"); } - @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - - assert task instanceof CancellableTask; - CancellableTask cancellableTask = (CancellableTask) task; - - final Repository repository = repositoriesService.repository(request.getRepositoryName()); - if (repository instanceof BlobStoreRepository == false) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob store repository"); + final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; + final BlobContainer blobContainer = blobStoreRepository.blobStore() + .blobContainer(blobStoreRepository.basePath().add(request.getBlobPath())); + + logger.trace("handling [{}]", request); + + final InputStream rawInputStream; + try { + if (request.isWholeBlob()) { + rawInputStream = blobContainer.readBlob(OperationPurpose.REPOSITORY_ANALYSIS, request.getBlobName()); + } else { + rawInputStream = blobContainer.readBlob( + OperationPurpose.REPOSITORY_ANALYSIS, + request.getBlobName(), + request.getRangeStart(), + request.getRangeLength() + ); } + } catch (FileNotFoundException | NoSuchFileException e) { + logger.trace("blob not found for [{}]", request); + listener.onResponse(Response.BLOB_NOT_FOUND); + return; + } catch (IOException e) { + logger.warn("failed to read blob for [{}]", request); + listener.onFailure(e); + return; + } - final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; - final 
BlobContainer blobContainer = blobStoreRepository.blobStore() - .blobContainer(blobStoreRepository.basePath().add(request.getBlobPath())); - - logger.trace("handling [{}]", request); - - final InputStream rawInputStream; - try { - if (request.isWholeBlob()) { - rawInputStream = blobContainer.readBlob(OperationPurpose.SNAPSHOT, request.getBlobName()); - } else { - rawInputStream = blobContainer.readBlob( - OperationPurpose.SNAPSHOT, - request.getBlobName(), - request.getRangeStart(), - request.getRangeLength() - ); + logger.trace("reading blob for [{}]", request); + + final AtomicLong throttleNanos = new AtomicLong(); + final InputStream throttledInputStream = blobStoreRepository.maybeRateLimitRestores(rawInputStream, throttleNanos::addAndGet); + final CRC32 crc32 = new CRC32(); + final byte[] buffer = new byte[BUFFER_SIZE]; + long bytesRead = 0L; + final long startTimeNanos = System.nanoTime(); + long firstByteNanos = startTimeNanos; + + boolean success = false; + try { + while (true) { + final int readSize; + try { + readSize = throttledInputStream.read(buffer, 0, buffer.length); + } catch (IOException e) { + logger.warn("exception while read blob for [{}]", request); + listener.onFailure(e); + return; } - } catch (FileNotFoundException | NoSuchFileException e) { - logger.trace("blob not found for [{}]", request); - listener.onResponse(Response.BLOB_NOT_FOUND); - return; - } catch (IOException e) { - logger.warn("failed to read blob for [{}]", request); - listener.onFailure(e); - return; - } - - logger.trace("reading blob for [{}]", request); - - final AtomicLong throttleNanos = new AtomicLong(); - final InputStream throttledInputStream = blobStoreRepository.maybeRateLimitRestores(rawInputStream, throttleNanos::addAndGet); - final CRC32 crc32 = new CRC32(); - final byte[] buffer = new byte[BUFFER_SIZE]; - long bytesRead = 0L; - final long startTimeNanos = System.nanoTime(); - long firstByteNanos = startTimeNanos; - - boolean success = false; - try { - while (true) { - final int readSize; - try { - readSize = throttledInputStream.read(buffer, 0, buffer.length); - } catch (IOException e) { - logger.warn("exception while read blob for [{}]", request); - listener.onFailure(e); - return; - } - if (readSize == -1) { - break; - } - - if (readSize > 0) { - if (bytesRead == 0L) { - firstByteNanos = System.nanoTime(); - } + if (readSize == -1) { + break; + } - crc32.update(buffer, 0, readSize); - bytesRead += readSize; + if (readSize > 0) { + if (bytesRead == 0L) { + firstByteNanos = System.nanoTime(); } - if (cancellableTask.isCancelled()) { - throw new RepositoryVerificationException( - request.repositoryName, - "cancelled [" + request.getDescription() + "] after reading [" + bytesRead + "] bytes" - ); - } + crc32.update(buffer, 0, readSize); + bytesRead += readSize; } - success = true; - } finally { - if (success == false) { - IOUtils.closeWhileHandlingException(throttledInputStream); + + if (cancellableTask.isCancelled()) { + throw new RepositoryVerificationException( + request.repositoryName, + "cancelled [" + request.getDescription() + "] after reading [" + bytesRead + "] bytes" + ); } } - try { - throttledInputStream.close(); - } catch (IOException e) { - throw new RepositoryVerificationException( - request.repositoryName, - "failed to close input stream when handling [" + request.getDescription() + "]", - e - ); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(throttledInputStream); } + } + try { + throttledInputStream.close(); + } catch 
(IOException e) { + throw new RepositoryVerificationException( + request.repositoryName, + "failed to close input stream when handling [" + request.getDescription() + "]", + e + ); + } - final long endTimeNanos = System.nanoTime(); - - if (request.isWholeBlob() == false && bytesRead != request.getRangeLength()) { - throw new RepositoryVerificationException( - request.repositoryName, - "unexpectedly read [" + bytesRead + "] bytes when handling [" + request.getDescription() + "]" - ); - } + final long endTimeNanos = System.nanoTime(); - final Response response = new Response( - bytesRead, - crc32.getValue(), - firstByteNanos - startTimeNanos, - endTimeNanos - startTimeNanos, - throttleNanos.get() + if (request.isWholeBlob() == false && bytesRead != request.getRangeLength()) { + throw new RepositoryVerificationException( + request.repositoryName, + "unexpectedly read [" + bytesRead + "] bytes when handling [" + request.getDescription() + "]" ); - logger.trace("responding to [{}] with [{}]", request, response); - listener.onResponse(response); } + final Response response = new Response( + bytesRead, + crc32.getValue(), + firstByteNanos - startTimeNanos, + endTimeNanos - startTimeNanos, + throttleNanos.get() + ); + logger.trace("responding to [{}] with [{}]", request, response); + listener.onResponse(response); } - public static class Request extends ActionRequest { + static class Request extends ActionRequest { private final String repositoryName; private final String blobPath; @@ -230,29 +209,29 @@ public ActionRequestValidationException validate() { this.rangeEnd = rangeEnd; } - public String getRepositoryName() { + String getRepositoryName() { return repositoryName; } - public String getBlobPath() { + String getBlobPath() { return blobPath; } - public String getBlobName() { + String getBlobName() { return blobName; } - public long getRangeStart() { + long getRangeStart() { assert isWholeBlob() == false; return rangeStart; } - public long getRangeEnd() { + long getRangeEnd() { assert isWholeBlob() == false; return rangeEnd; } - public long getRangeLength() { + long getRangeLength() { assert isWholeBlob() == false; return rangeEnd - rangeStart; } @@ -303,7 +282,7 @@ public boolean shouldCancelChildrenOnCancellation() { } } - public static class Response extends ActionResponse { + static class Response extends ActionResponse { static Response BLOB_NOT_FOUND = new Response(0L, 0L, 0L, 0L, 0L); @@ -355,27 +334,27 @@ public String toString() { + '}'; } - public long getBytesRead() { + long getBytesRead() { return bytesRead; } - public long getChecksum() { + long getChecksum() { return checksum; } - public long getFirstByteNanos() { + long getFirstByteNanos() { return firstByteNanos; } - public long getElapsedNanos() { + long getElapsedNanos() { return elapsedNanos; } - public long getThrottleNanos() { + long getThrottleNanos() { return throttleNanos; } - public boolean isNotFound() { + boolean isNotFound() { return bytesRead == 0L && checksum == 0L && firstByteNanos == 0L && elapsedNanos == 0L && throttleNanos == 0L; } diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java deleted file mode 100644 index 641d18c4204b8..0000000000000 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RegisterAnalyzeAction.java +++ /dev/null @@ -1,300 
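// An illustrative, JDK-only reduction of the measured read loop above: stream a blob,
// feed it through CRC32 in 8 KiB chunks, and record both time-to-first-byte and total
// elapsed nanos. The real code closes the stream by hand so that close-failures are
// reported distinctly; try-with-resources here is a simplification, and all names
// are hypothetical.
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.CRC32;

final class ChecksumLoopSketch {

    record Result(long bytesRead, long checksum, long firstByteNanos, long elapsedNanos) {}

    static Result checksum(InputStream in) throws IOException {
        final CRC32 crc32 = new CRC32();
        final byte[] buffer = new byte[8192];
        long bytesRead = 0L;
        final long startTimeNanos = System.nanoTime();
        long firstByteNanos = startTimeNanos; // stays equal to the start time for an empty blob
        try (in) {
            int readSize;
            while ((readSize = in.read(buffer, 0, buffer.length)) != -1) {
                if (readSize > 0) {
                    if (bytesRead == 0L) {
                        firstByteNanos = System.nanoTime(); // latency until the first byte arrived
                    }
                    crc32.update(buffer, 0, readSize);
                    bytesRead += readSize;
                }
            }
        }
        final long endTimeNanos = System.nanoTime();
        return new Result(bytesRead, crc32.getValue(), firstByteNanos - startTimeNanos, endTimeNanos - startTimeNanos);
    }
}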
+0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.repositories.blobstore.testkit; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.ActionRunnable; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.blobstore.BlobContainer; -import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.OperationPurpose; -import org.elasticsearch.common.blobstore.OptionalBytesReference; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.repositories.Repository; -import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import org.elasticsearch.tasks.CancellableTask; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.TransportService; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.Map; -import java.util.concurrent.ExecutorService; - -/** - * An action which atomically increments a register using {@link BlobContainer#compareAndExchangeRegister}. 
- */ -public class RegisterAnalyzeAction extends ActionType { - - private static final Logger logger = LogManager.getLogger(RegisterAnalyzeAction.class); - - public static final RegisterAnalyzeAction INSTANCE = new RegisterAnalyzeAction(); - public static final String NAME = "cluster:admin/repository/analyze/register"; - - private RegisterAnalyzeAction() { - super(NAME, in -> ActionResponse.Empty.INSTANCE); - } - - public static class TransportAction extends HandledTransportAction { - - private static final Logger logger = RegisterAnalyzeAction.logger; - - private final RepositoriesService repositoriesService; - private final ExecutorService executor; - - @Inject - public TransportAction(TransportService transportService, ActionFilters actionFilters, RepositoriesService repositoriesService) { - super( - NAME, - transportService, - actionFilters, - Request::new, - transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT) - ); - this.repositoriesService = repositoriesService; - this.executor = transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT); - } - - @Override - protected void doExecute(Task task, Request request, ActionListener outerListenerOld) { - final var outerListener = ActionListener.assertOnce(outerListenerOld); - final Repository repository = repositoriesService.repository(request.getRepositoryName()); - if (repository instanceof BlobStoreRepository == false) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); - } - if (repository.isReadOnly()) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); - } - final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; - final BlobPath path = blobStoreRepository.basePath().add(request.getContainerPath()); - final BlobContainer blobContainer = blobStoreRepository.blobStore().blobContainer(path); - - logger.trace("handling [{}]", request); - - assert task instanceof CancellableTask; - - final String registerName = request.getRegisterName(); - final ActionListener initialValueListener = new ActionListener<>() { - @Override - public void onResponse(OptionalBytesReference maybeInitialBytes) { - final long initialValue = maybeInitialBytes.isPresent() ? longFromBytes(maybeInitialBytes.bytesReference()) : 0L; - - ActionListener.run(outerListener.map(ignored -> ActionResponse.Empty.INSTANCE), l -> { - if (initialValue < 0 || initialValue >= request.getRequestCount()) { - throw new IllegalStateException("register holds unexpected value [" + initialValue + "]"); - } - - class Execution extends ActionRunnable { - private long currentValue; - - private final ActionListener witnessListener; - - Execution(long currentValue) { - super(l); - this.currentValue = currentValue; - this.witnessListener = listener.delegateFailure(this::handleWitness); - } - - @Override - protected void doRun() { - if (((CancellableTask) task).notifyIfCancelled(listener) == false) { - blobContainer.compareAndExchangeRegister( - OperationPurpose.SNAPSHOT, - registerName, - bytesFromLong(currentValue), - bytesFromLong(currentValue + 1L), - witnessListener - ); - } - } - - private void handleWitness(ActionListener delegate, OptionalBytesReference witnessOrEmpty) { - if (witnessOrEmpty.isPresent() == false) { - // Concurrent activity prevented us from updating the value, or even reading the concurrently-updated - // result, so we must just try again. 
- executor.execute(Execution.this); - return; - } - - final long witness = longFromBytes(witnessOrEmpty.bytesReference()); - if (witness == currentValue) { - delegate.onResponse(null); - } else if (witness < currentValue || witness >= request.getRequestCount()) { - delegate.onFailure(new IllegalStateException("register holds unexpected value [" + witness + "]")); - } else { - currentValue = witness; - executor.execute(Execution.this); - } - } - - } - - new Execution(initialValue).run(); - - }); - } - - @Override - public void onFailure(Exception e) { - if (e instanceof UnsupportedOperationException) { - // Registers are not supported on all repository types, and that's ok. If it's not supported here then the final - // check will also be unsupported, so it doesn't matter that we didn't do anything before this successful response. - outerListener.onResponse(ActionResponse.Empty.INSTANCE); - } else { - outerListener.onFailure(e); - } - } - }; - - if (request.getInitialRead() > request.getRequestCount()) { - blobContainer.getRegister(OperationPurpose.SNAPSHOT, registerName, initialValueListener); - } else { - blobContainer.compareAndExchangeRegister( - OperationPurpose.SNAPSHOT, - registerName, - bytesFromLong(request.getInitialRead()), - bytesFromLong( - request.getInitialRead() == request.getRequestCount() ? request.getRequestCount() + 1 : request.getInitialRead() - ), - initialValueListener - ); - } - } - } - - public static class Request extends ActionRequest { - private final String repositoryName; - private final String containerPath; - private final String registerName; - private final int requestCount; - private final int initialRead; - - public Request(String repositoryName, String containerPath, String registerName, int requestCount, int initialRead) { - this.repositoryName = repositoryName; - this.containerPath = containerPath; - this.registerName = registerName; - this.requestCount = requestCount; - this.initialRead = initialRead; - } - - public Request(StreamInput in) throws IOException { - super(in); - assert in.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); - repositoryName = in.readString(); - containerPath = in.readString(); - registerName = in.readString(); - requestCount = in.readVInt(); - initialRead = in.readVInt(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - assert out.getTransportVersion().onOrAfter(TransportVersions.V_8_8_0); - super.writeTo(out); - out.writeString(repositoryName); - out.writeString(containerPath); - out.writeString(registerName); - out.writeVInt(requestCount); - out.writeVInt(initialRead); - } - - @Override - public ActionRequestValidationException validate() { - return null; - } - - public String getRepositoryName() { - return repositoryName; - } - - public String getContainerPath() { - return containerPath; - } - - public String getRegisterName() { - return registerName; - } - - public int getRequestCount() { - return requestCount; - } - - public int getInitialRead() { - return initialRead; - } - - @Override - public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { - return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers); - } - - @Override - public String toString() { - return getDescription(); - } - - @Override - public String getDescription() { - return Strings.format( - """ - RegisterAnalyzeAction.Request{\ - repositoryName='%s', containerPath='%s', registerName='%s', requestCount='%d', initialRead='%d'}""", - repositoryName, - 
containerPath, - registerName, - requestCount, - initialRead - ); - } - } - - static long longFromBytes(BytesReference bytesReference) { - if (bytesReference.length() == 0) { - return 0L; - } else if (bytesReference.length() == Long.BYTES) { - try (var baos = new ByteArrayOutputStream(Long.BYTES)) { - bytesReference.writeTo(baos); - final var bytes = baos.toByteArray(); - assert bytes.length == Long.BYTES; - return ByteUtils.readLongBE(bytes, 0); - } catch (IOException e) { - assert false : "no IO takes place"; - throw new IllegalStateException("unexpected conversion error", e); - } - } else { - throw new IllegalArgumentException("cannot read long from BytesReference of length " + bytesReference.length()); - } - } - - static BytesReference bytesFromLong(long value) { - if (value == 0L) { - return BytesArray.EMPTY; - } else { - final var bytes = new byte[Long.BYTES]; - ByteUtils.writeLongBE(value, bytes, 0); - return new BytesArray(bytes); - } - } -} diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java index d8e3c82433704..56e5d5c8c0bb1 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/RepositoryAnalyzeAction.java @@ -9,9 +9,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.ActionRequest; @@ -22,6 +23,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -72,6 +74,7 @@ import java.util.Random; import java.util.Set; import java.util.concurrent.Semaphore; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -80,6 +83,8 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.repositories.blobstore.testkit.BlobAnalyzeAction.MAX_ATOMIC_WRITE_SIZE; +import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.bytesFromLong; +import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.longFromBytes; import static org.elasticsearch.repositories.blobstore.testkit.SnapshotRepositoryTestKit.humanReadableNanos; /** @@ -87,98 +92,100 @@ * the results. Tries to fail fast by cancelling everything if any child task fails, or the timeout is reached, to avoid consuming * unnecessary resources. On completion, does a best-effort wait until the blob list contains all the expected blobs, then deletes them all. 
*/ -public class RepositoryAnalyzeAction extends ActionType<RepositoryAnalyzeAction.Response> { +public class RepositoryAnalyzeAction extends HandledTransportAction<RepositoryAnalyzeAction.Request, RepositoryAnalyzeAction.Response> { private static final Logger logger = LogManager.getLogger(RepositoryAnalyzeAction.class); - public static final RepositoryAnalyzeAction INSTANCE = new RepositoryAnalyzeAction(); - public static final String NAME = "cluster:admin/repository/analyze"; - - private RepositoryAnalyzeAction() { - super(NAME, Response::new); + public static final ActionType<Response> INSTANCE = ActionType.localOnly("cluster:admin/repository/analyze"); + + static final String UNCONTENDED_REGISTER_NAME_PREFIX = "test-register-uncontended-"; + static final String CONTENDED_REGISTER_NAME_PREFIX = "test-register-contended-"; + + private final TransportService transportService; + private final ClusterService clusterService; + private final RepositoriesService repositoriesService; + + @Inject + public RepositoryAnalyzeAction( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + RepositoriesService repositoriesService + ) { + super(INSTANCE.name(), transportService, actionFilters, RepositoryAnalyzeAction.Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); + this.transportService = transportService; + this.clusterService = clusterService; + this.repositoriesService = repositoriesService; + + // construct (and therefore implicitly register) the subsidiary actions + new BlobAnalyzeAction(transportService, actionFilters, repositoriesService); + new GetBlobChecksumAction(transportService, actionFilters, repositoriesService); + new ContendedRegisterAnalyzeAction(transportService, actionFilters, repositoriesService); + new UncontendedRegisterAnalyzeAction(transportService, actionFilters, repositoriesService); } - public static class TransportAction extends HandledTransportAction<Request, Response> { - - private final TransportService transportService; - private final ClusterService clusterService; - private final RepositoriesService repositoriesService; - - @Inject - public TransportAction( - TransportService transportService, - ActionFilters actionFilters, - ClusterService clusterService, - RepositoriesService repositoriesService - ) { - super(NAME, transportService, actionFilters, RepositoryAnalyzeAction.Request::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); - this.transportService = transportService; - this.clusterService = clusterService; - this.repositoriesService = repositoriesService; - } - - @Override - protected void doExecute(Task task, Request request, ActionListener<Response> listener) { - final ClusterState state = clusterService.state(); - - final ThreadPool threadPool = transportService.getThreadPool(); - request.reseed(threadPool.relativeTimeInMillis()); + @Override + protected void doExecute(Task task, Request request, ActionListener<Response> listener) { + final ClusterState state = clusterService.state(); - final DiscoveryNode localNode = transportService.getLocalNode(); - if (isSnapshotNode(localNode)) { - final Repository repository = repositoriesService.repository(request.getRepositoryName()); - if (repository instanceof BlobStoreRepository == false) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); - } - if (repository.isReadOnly()) { - throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); - } + final ThreadPool threadPool = transportService.getThreadPool(); + request.reseed(threadPool.relativeTimeInMillis()); - assert task instanceof CancellableTask; - new
AsyncAction( - transportService, - (BlobStoreRepository) repository, - (CancellableTask) task, - request, - state.nodes(), - threadPool::relativeTimeInMillis, - listener - ).run(); - return; + final DiscoveryNode localNode = transportService.getLocalNode(); + if (isSnapshotNode(localNode)) { + final Repository repository = repositoriesService.repository(request.getRepositoryName()); + if (repository instanceof BlobStoreRepository == false) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); } - - if (request.getReroutedFrom() != null) { - assert false : request.getReroutedFrom(); - throw new IllegalArgumentException( - "analysis of repository [" - + request.getRepositoryName() - + "] rerouted from [" - + request.getReroutedFrom() - + "] to non-snapshot node" - ); + if (repository.isReadOnly()) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); } - request.reroutedFrom(localNode); - final List snapshotNodes = getSnapshotNodes(state.nodes()); - if (snapshotNodes.isEmpty()) { - listener.onFailure( - new IllegalArgumentException("no snapshot nodes found for analysis of repository [" + request.getRepositoryName() + "]") - ); - } else { - if (snapshotNodes.size() > 1) { - snapshotNodes.remove(state.nodes().getMasterNode()); - } - final DiscoveryNode targetNode = snapshotNodes.get(new Random(request.getSeed()).nextInt(snapshotNodes.size())); - RepositoryAnalyzeAction.logger.trace("rerouting analysis [{}] to [{}]", request.getDescription(), targetNode); - transportService.sendChildRequest( - targetNode, - NAME, - request, - task, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(listener, Response::new, TransportResponseHandler.TRANSPORT_WORKER) - ); + assert task instanceof CancellableTask; + new AsyncAction( + transportService, + (BlobStoreRepository) repository, + (CancellableTask) task, + request, + state.nodes(), + state.getMinTransportVersion(), + threadPool::relativeTimeInMillis, + listener + ).run(); + return; + } + + if (request.getReroutedFrom() != null) { + assert false : request.getReroutedFrom(); + throw new IllegalArgumentException( + "analysis of repository [" + + request.getRepositoryName() + + "] rerouted from [" + + request.getReroutedFrom() + + "] to non-snapshot node" + ); + } + + request.reroutedFrom(localNode); + final List snapshotNodes = getSnapshotNodes(state.nodes()); + if (snapshotNodes.isEmpty()) { + listener.onFailure( + new IllegalArgumentException("no snapshot nodes found for analysis of repository [" + request.getRepositoryName() + "]") + ); + } else { + if (snapshotNodes.size() > 1) { + snapshotNodes.remove(state.nodes().getMasterNode()); } + final DiscoveryNode targetNode = snapshotNodes.get(new Random(request.getSeed()).nextInt(snapshotNodes.size())); + RepositoryAnalyzeAction.logger.trace("rerouting analysis [{}] to [{}]", request.getDescription(), targetNode); + transportService.sendChildRequest( + targetNode, + INSTANCE.name(), + request, + task, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(listener, Response::new, TransportResponseHandler.TRANSPORT_WORKER) + ); } } @@ -362,8 +369,10 @@ public static class AsyncAction { private final CancellableTask task; private final Request request; private final DiscoveryNodes discoveryNodes; + private final TransportVersion minClusterTransportVersion; private final LongSupplier currentTimeMillisSupplier; private final ActionListener listener; + private 
final SubscribableListener cancellationListener; private final long timeoutTimeMillis; // choose the blob path nondeterministically to avoid clashes, assuming that the actual path doesn't matter for reproduction @@ -384,6 +393,7 @@ public AsyncAction( CancellableTask task, Request request, DiscoveryNodes discoveryNodes, + TransportVersion minClusterTransportVersion, LongSupplier currentTimeMillisSupplier, ActionListener listener ) { @@ -392,17 +402,27 @@ public AsyncAction( this.task = task; this.request = request; this.discoveryNodes = discoveryNodes; + this.minClusterTransportVersion = minClusterTransportVersion; this.currentTimeMillisSupplier = currentTimeMillisSupplier; this.timeoutTimeMillis = currentTimeMillisSupplier.getAsLong() + request.getTimeout().millis(); - this.listener = listener; + + this.cancellationListener = new SubscribableListener<>(); + this.listener = ActionListener.runBefore(listener, () -> cancellationListener.onResponse(null)); responses = new ArrayList<>(request.blobCount); } - private void fail(Exception e) { + private boolean setFirstFailure(Exception e) { if (failure.compareAndSet(null, e)) { transportService.getTaskManager().cancelTaskAndDescendants(task, "task failed", false, ActionListener.noop()); + return true; } else { + return false; + } + } + + private void fail(Exception e) { + if (setFirstFailure(e) == false) { if (innerFailures.tryAcquire()) { final Throwable cause = ExceptionsHelper.unwrapCause(e); if (cause instanceof TaskCancelledException || cause instanceof ReceiveTimeoutTransportException) { @@ -415,33 +435,31 @@ private void fail(Exception e) { } /** - * Check that we haven't already failed or been cancelled or timed out; if newly cancelled or timed out then record this as the root - * cause of failure. + * Check that we haven't already failed (including cancellation and timing out). */ private boolean isRunning() { - if (failure.get() != null) { - return false; - } + return failure.get() == null; + } - if (task.isCancelled()) { - failure.compareAndSet(null, new RepositoryVerificationException(request.repositoryName, "verification cancelled")); - // if this CAS failed then we're failing for some other reason, nbd; also if the task is cancelled then its descendants are - // also cancelled, so no further action is needed either way. 
- return false; + private class CheckForCancelListener implements ActionListener { + @Override + public void onResponse(Void unused) { + // task complete, nothing to do } - if (timeoutTimeMillis < currentTimeMillisSupplier.getAsLong()) { - if (failure.compareAndSet( - null, - new RepositoryVerificationException(request.repositoryName, "analysis timed out after [" + request.getTimeout() + "]") - )) { - transportService.getTaskManager().cancelTaskAndDescendants(task, "timed out", false, ActionListener.noop()); + @Override + public void onFailure(Exception e) { + assert e instanceof ElasticsearchTimeoutException : e; + if (isRunning()) { + // if this CAS fails then we're already failing for some other reason, nbd + setFirstFailure( + new RepositoryVerificationException( + request.repositoryName, + "analysis timed out after [" + request.getTimeout() + "]" + ) + ); } - // if this CAS failed then we're already failing for some other reason, nbd - return false; } - - return true; } public void run() { @@ -450,23 +468,43 @@ public void run() { logger.info("running analysis of repository [{}] using path [{}]", request.getRepositoryName(), blobPath); + cancellationListener.addTimeout(request.getTimeout(), repository.threadPool(), EsExecutors.DIRECT_EXECUTOR_SERVICE); + cancellationListener.addListener(new CheckForCancelListener()); + + task.addListener(() -> setFirstFailure(new RepositoryVerificationException(request.repositoryName, "analysis cancelled"))); + final Random random = new Random(request.getSeed()); final List nodes = getSnapshotNodes(discoveryNodes); - final String registerName = "test-register-" + UUIDs.randomBase64UUID(random); - try (var registerRefs = new RefCountingRunnable(finalRegisterValueVerifier(registerName, random, requestRefs.acquire()))) { - final int registerOperations = Math.max(nodes.size(), request.getConcurrency()); - for (int i = 0; i < registerOperations; i++) { - final RegisterAnalyzeAction.Request registerAnalyzeRequest = new RegisterAnalyzeAction.Request( - request.getRepositoryName(), - blobPath, - registerName, - registerOperations, - random.nextInt((registerOperations + 1) * 2) - ); - final DiscoveryNode node = nodes.get(i < nodes.size() ? i : random.nextInt(nodes.size())); - final Releasable registerRef = registerRefs.acquire(); - queue.add(ref -> runRegisterAnalysis(Releasables.wrap(registerRef, ref), registerAnalyzeRequest, node)); + if (minClusterTransportVersion.onOrAfter(TransportVersions.V_8_8_0)) { + final String contendedRegisterName = CONTENDED_REGISTER_NAME_PREFIX + UUIDs.randomBase64UUID(random); + final AtomicBoolean contendedRegisterAnalysisComplete = new AtomicBoolean(); + try ( + var registerRefs = new RefCountingRunnable( + finalRegisterValueVerifier( + contendedRegisterName, + random, + Releasables.wrap(requestRefs.acquire(), () -> contendedRegisterAnalysisComplete.set(true)) + ) + ) + ) { + final int registerOperations = Math.max(nodes.size(), request.getConcurrency()); + for (int i = 0; i < registerOperations; i++) { + final ContendedRegisterAnalyzeAction.Request registerAnalyzeRequest = new ContendedRegisterAnalyzeAction.Request( + request.getRepositoryName(), + blobPath, + contendedRegisterName, + registerOperations, + random.nextInt((registerOperations + 1) * 2) + ); + final DiscoveryNode node = nodes.get(i < nodes.size() ? 
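// An illustrative, JDK-only sketch of the "first failure wins" bookkeeping that
// AsyncAction uses above: the first recorded exception (including cancellation and
// the timeout delivered via the SubscribableListener) becomes the root cause and
// triggers cancellation of the task tree; later failures only add suppressed detail.
// Names are hypothetical, and the real code also caps how many suppressed exceptions
// it retains.
import java.util.concurrent.atomic.AtomicReference;

final class FirstFailureSketch {

    private final AtomicReference<Exception> failure = new AtomicReference<>();

    boolean setFirstFailure(Exception e) {
        if (failure.compareAndSet(null, e)) {
            // the real code cancels the task and all its descendants here
            return true;
        }
        return false;
    }

    void fail(Exception e) {
        if (setFirstFailure(e) == false) {
            final Exception firstFailure = failure.get();
            if (firstFailure != e) {
                firstFailure.addSuppressed(e);
            }
        }
    }

    boolean isRunning() {
        // cancellation and timeout are themselves recorded as failures, so one check suffices
        return failure.get() == null;
    }
}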
i : random.nextInt(nodes.size())); + final Releasable registerRef = registerRefs.acquire(); + queue.add(ref -> runContendedRegisterAnalysis(Releasables.wrap(registerRef, ref), registerAnalyzeRequest, node)); + } + } + + if (minClusterTransportVersion.onOrAfter(TransportVersions.UNCONTENDED_REGISTER_ANALYSIS_ADDED)) { + new UncontendedRegisterAnalysis(new Random(random.nextLong()), nodes, contendedRegisterAnalysisComplete).run(); } } @@ -536,7 +574,7 @@ private void runBlobAnalysis(Releasable ref, final BlobAnalyzeAction.Request req BlobAnalyzeAction.NAME, request, task, - TransportRequestOptions.timeout(TimeValue.timeValueMillis(timeoutTimeMillis - currentTimeMillisSupplier.getAsLong())), + TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(ActionListener.releaseAfter(new ActionListener<>() { @Override public void onResponse(BlobAnalyzeAction.Response response) { @@ -568,11 +606,11 @@ private BlobContainer getBlobContainer() { return repository.blobStore().blobContainer(repository.basePath().add(blobPath)); } - private void runRegisterAnalysis(Releasable ref, RegisterAnalyzeAction.Request request, DiscoveryNode node) { - if (node.getVersion().onOrAfter(Version.V_8_8_0) && isRunning()) { + private void runContendedRegisterAnalysis(Releasable ref, ContendedRegisterAnalyzeAction.Request request, DiscoveryNode node) { + if (isRunning()) { transportService.sendChildRequest( node, - RegisterAnalyzeAction.NAME, + ContendedRegisterAnalyzeAction.NAME, request, task, TransportRequestOptions.EMPTY, @@ -604,9 +642,7 @@ private Runnable finalRegisterValueVerifier(String registerName, Random random, @Override public void onResponse(OptionalBytesReference actualFinalRegisterValue) { if (actualFinalRegisterValue.isPresent() == false - || RegisterAnalyzeAction.longFromBytes( - actualFinalRegisterValue.bytesReference() - ) != expectedFinalRegisterValue) { + || longFromBytes(actualFinalRegisterValue.bytesReference()) != expectedFinalRegisterValue) { fail( new RepositoryVerificationException( request.getRepositoryName(), @@ -630,22 +666,22 @@ public void onFailure(Exception exp) { } }, ref), listener -> { switch (random.nextInt(3)) { - case 0 -> getBlobContainer().getRegister(OperationPurpose.SNAPSHOT, registerName, listener); + case 0 -> getBlobContainer().getRegister(OperationPurpose.REPOSITORY_ANALYSIS, registerName, listener); case 1 -> getBlobContainer().compareAndExchangeRegister( - OperationPurpose.SNAPSHOT, + OperationPurpose.REPOSITORY_ANALYSIS, registerName, - RegisterAnalyzeAction.bytesFromLong(expectedFinalRegisterValue), + bytesFromLong(expectedFinalRegisterValue), new BytesArray(new byte[] { (byte) 0xff }), listener ); case 2 -> getBlobContainer().compareAndSetRegister( - OperationPurpose.SNAPSHOT, + OperationPurpose.REPOSITORY_ANALYSIS, registerName, - RegisterAnalyzeAction.bytesFromLong(expectedFinalRegisterValue), + bytesFromLong(expectedFinalRegisterValue), new BytesArray(new byte[] { (byte) 0xff }), listener.map( b -> b - ? OptionalBytesReference.of(RegisterAnalyzeAction.bytesFromLong(expectedFinalRegisterValue)) + ? 
OptionalBytesReference.of(bytesFromLong(expectedFinalRegisterValue)) : OptionalBytesReference.MISSING ) ); @@ -661,6 +697,59 @@ public void onFailure(Exception exp) { }; } + private class UncontendedRegisterAnalysis implements Runnable { + private final Random random; + private final String registerName; + private final List nodes; + private final AtomicBoolean otherAnalysisComplete; + private int currentValue; // actions run in strict sequence so no need for synchronization + + UncontendedRegisterAnalysis(Random random, List nodes, AtomicBoolean otherAnalysisComplete) { + this.random = random; + this.registerName = UNCONTENDED_REGISTER_NAME_PREFIX + UUIDs.randomBase64UUID(random); + this.nodes = nodes; + this.otherAnalysisComplete = otherAnalysisComplete; + } + + private final ActionListener stepListener = new ActionListener<>() { + @Override + public void onResponse(ActionResponse.Empty ignored) { + currentValue += 1; + run(); + } + + @Override + public void onFailure(Exception e) { + fail(e); + } + }; + + @Override + public void run() { + if (isRunning() == false) { + return; + } + + // complete at least request.getConcurrency() steps, but we may as well keep running for longer too + if (currentValue > request.getConcurrency() && otherAnalysisComplete.get()) { + return; + } + + transportService.sendChildRequest( + nodes.get(currentValue < nodes.size() ? currentValue : random.nextInt(nodes.size())), + UncontendedRegisterAnalyzeAction.NAME, + new UncontendedRegisterAnalyzeAction.Request(request.getRepositoryName(), blobPath, registerName, currentValue), + task, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>( + ActionListener.releaseAfter(stepListener, requestRefs.acquire()), + in -> ActionResponse.Empty.INSTANCE, + TransportResponseHandler.TRANSPORT_WORKER + ) + ); + } + } + private void runCleanUp() { transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT).execute(ActionRunnable.wrap(listener, l -> { final long listingStartTimeNanos = System.nanoTime(); @@ -689,7 +778,7 @@ private void ensureConsistentListing() { try { final BlobContainer blobContainer = getBlobContainer(); final Set missingBlobs = new HashSet<>(expectedBlobs); - final Map blobsMap = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); + final Map blobsMap = blobContainer.listBlobs(OperationPurpose.REPOSITORY_ANALYSIS); missingBlobs.removeAll(blobsMap.keySet()); if (missingBlobs.isEmpty()) { @@ -712,11 +801,11 @@ private void ensureConsistentListing() { private void deleteContainer() { try { final BlobContainer blobContainer = getBlobContainer(); - blobContainer.delete(OperationPurpose.SNAPSHOT); + blobContainer.delete(OperationPurpose.REPOSITORY_ANALYSIS); if (failure.get() != null) { return; } - final Map blobsMap = blobContainer.listBlobs(OperationPurpose.SNAPSHOT); + final Map blobsMap = blobContainer.listBlobs(OperationPurpose.REPOSITORY_ANALYSIS); if (blobsMap.isEmpty() == false) { final RepositoryVerificationException repositoryVerificationException = new RepositoryVerificationException( request.repositoryName, diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java index 98a5bb34f99ae..96a4d05d2fb4b 100644 --- a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java +++ 
b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/SnapshotRepositoryTestKit.java @@ -30,12 +30,7 @@ public class SnapshotRepositoryTestKit extends Plugin implements ActionPlugin { @Override public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() { - return List.of( - new ActionHandler<>(RepositoryAnalyzeAction.INSTANCE, RepositoryAnalyzeAction.TransportAction.class), - new ActionHandler<>(BlobAnalyzeAction.INSTANCE, BlobAnalyzeAction.TransportAction.class), - new ActionHandler<>(GetBlobChecksumAction.INSTANCE, GetBlobChecksumAction.TransportAction.class), - new ActionHandler<>(RegisterAnalyzeAction.INSTANCE, RegisterAnalyzeAction.TransportAction.class) - ); + return List.of(new ActionHandler<>(RepositoryAnalyzeAction.INSTANCE, RepositoryAnalyzeAction.class)); } @Override diff --git a/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/UncontendedRegisterAnalyzeAction.java b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/UncontendedRegisterAnalyzeAction.java new file mode 100644 index 0000000000000..5a279aaf6a96f --- /dev/null +++ b/x-pack/plugin/snapshot-repo-test-kit/src/main/java/org/elasticsearch/repositories/blobstore/testkit/UncontendedRegisterAnalyzeAction.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.repositories.blobstore.testkit; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.blobstore.BlobContainer; +import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OperationPurpose; +import org.elasticsearch.common.blobstore.OptionalBytesReference; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryVerificationException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.bytesFromLong; +import static org.elasticsearch.repositories.blobstore.testkit.ContendedRegisterAnalyzeAction.longFromBytes; + +class UncontendedRegisterAnalyzeAction extends HandledTransportAction<UncontendedRegisterAnalyzeAction.Request, ActionResponse.Empty> { + + private static final Logger logger = LogManager.getLogger(UncontendedRegisterAnalyzeAction.class); + + static final String NAME = "cluster:admin/repository/analyze/register/uncontended"; + + private final
RepositoriesService repositoriesService; + + UncontendedRegisterAnalyzeAction( + TransportService transportService, + ActionFilters actionFilters, + RepositoriesService repositoriesService + ) { + super(NAME, transportService, actionFilters, Request::new, transportService.getThreadPool().executor(ThreadPool.Names.SNAPSHOT)); + this.repositoriesService = repositoriesService; + } + + @Override + protected void doExecute(Task task, Request request, ActionListener outerListener) { + final ActionListener listener = ActionListener.assertOnce(outerListener.map(ignored -> ActionResponse.Empty.INSTANCE)); + final Repository repository = repositoriesService.repository(request.getRepositoryName()); + if (repository instanceof BlobStoreRepository == false) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is not a blob-store repository"); + } + if (repository.isReadOnly()) { + throw new IllegalArgumentException("repository [" + request.getRepositoryName() + "] is read-only"); + } + final BlobStoreRepository blobStoreRepository = (BlobStoreRepository) repository; + final BlobPath path = blobStoreRepository.basePath().add(request.getContainerPath()); + final BlobContainer blobContainer = blobStoreRepository.blobStore().blobContainer(path); + + logger.trace("handling [{}]", request); + + assert task instanceof CancellableTask; + blobContainer.compareAndExchangeRegister( + OperationPurpose.REPOSITORY_ANALYSIS, + request.getRegisterName(), + bytesFromLong(request.getExpectedValue()), + bytesFromLong(request.getExpectedValue() + 1), + new ActionListener<>() { + @Override + public void onResponse(OptionalBytesReference optionalBytesReference) { + ActionListener.completeWith(listener, () -> { + if (optionalBytesReference.isPresent() == false) { + throw new RepositoryVerificationException( + repository.getMetadata().name(), + Strings.format( + "uncontended register operation failed: expected [%d] but did not observe any value", + request.getExpectedValue() + ) + ); + } + + final var witness = longFromBytes(optionalBytesReference.bytesReference()); + if (witness != request.getExpectedValue()) { + throw new RepositoryVerificationException( + repository.getMetadata().name(), + Strings.format( + "uncontended register operation failed: expected [%d] but observed [%d]", + request.getExpectedValue(), + witness + ) + ); + } + + return null; + }); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof UnsupportedOperationException) { + // Registers are not supported on all repository types, and that's ok. 
+                        listener.onResponse(null);
+                    } else {
+                        listener.onFailure(e);
+                    }
+                }
+            }
+        );
+    }
+
+    static class Request extends ActionRequest {
+        private final String repositoryName;
+        private final String containerPath;
+        private final String registerName;
+        private final long expectedValue;
+
+        Request(String repositoryName, String containerPath, String registerName, long expectedValue) {
+            this.repositoryName = repositoryName;
+            this.containerPath = containerPath;
+            this.registerName = registerName;
+            this.expectedValue = expectedValue;
+        }
+
+        Request(StreamInput in) throws IOException {
+            super(in);
+            assert in.getTransportVersion().onOrAfter(TransportVersions.UNCONTENDED_REGISTER_ANALYSIS_ADDED);
+            repositoryName = in.readString();
+            containerPath = in.readString();
+            registerName = in.readString();
+            expectedValue = in.readVLong();
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            assert out.getTransportVersion().onOrAfter(TransportVersions.UNCONTENDED_REGISTER_ANALYSIS_ADDED);
+            super.writeTo(out);
+            out.writeString(repositoryName);
+            out.writeString(containerPath);
+            out.writeString(registerName);
+            out.writeVLong(expectedValue);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        String getRepositoryName() {
+            return repositoryName;
+        }
+
+        String getContainerPath() {
+            return containerPath;
+        }
+
+        String getRegisterName() {
+            return registerName;
+        }
+
+        long getExpectedValue() {
+            return expectedValue;
+        }
+
+        @Override
+        public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) {
+            return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers);
+        }
+
+        @Override
+        public String toString() {
+            return getDescription();
+        }
+
+        @Override
+        public String getDescription() {
+            return Strings.format(
+                """
+                    UncontendedRegisterAnalyzeAction.Request{\
+                    repositoryName='%s', containerPath='%s', registerName='%s', expectedValue='%d'}""",
+                repositoryName,
+                containerPath,
+                registerName,
+                expectedValue
+            );
+        }
+    }
+}
diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java
index 6c99347076519..296af0c983279 100644
--- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java
+++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoBoundingBoxQueryLegacyGeoShapeWithDocValuesIT.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.spatial.search;
 
 import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.search.geo.GeoBoundingBoxQueryIntegTestCase;
 import org.elasticsearch.test.index.IndexVersionUtils;
@@ -43,6 +44,6 @@ public XContentBuilder getMapping() throws IOException {
 
     @Override
     public IndexVersion randomSupportedVersion() {
-        return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0);
+        return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0);
     }
 }
diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index e70fc324064ad..3c64d140e2b56 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.geo.GeometryTestUtils; @@ -49,6 +48,9 @@ import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; + public class GeoGridAggAndQueryConsistencyIT extends ESIntegTestCase { @Override @@ -115,21 +117,24 @@ public void testKnownIssueWithCellLeftOfDatelineTouchingPolygonOnRightOfDateline .precision(15) .setGeoBoundingBox(boundingBox) .size(256 * 256); - SearchResponse response = client().prepareSearch("test").addAggregation(builderPoint).setSize(0).get(); - InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); - for (InternalGeoGridBucket bucket : gridPoint.getBuckets()) { - assertThat(bucket.getDocCount(), Matchers.greaterThan(0L)); - QueryBuilder queryBuilder = new GeoGridQueryBuilder("geometry").setGridId( - GeoGridQueryBuilder.Grid.GEOHEX, - bucket.getKeyAsString() - ); - response = client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder).get(); - assertThat( - "Bucket " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, - Matchers.equalTo(bucket.getDocCount()) - ); - } + assertResponse(client().prepareSearch("test").addAggregation(builderPoint).setSize(0), response -> { + InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); + for (InternalGeoGridBucket bucket : gridPoint.getBuckets()) { + assertThat(bucket.getDocCount(), Matchers.greaterThan(0L)); + QueryBuilder queryBuilder = new GeoGridQueryBuilder("geometry").setGridId( + GeoGridQueryBuilder.Grid.GEOHEX, + bucket.getKeyAsString() + ); + assertResponse( + client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), + innerResponse -> assertThat( + "Bucket " + bucket.getKeyAsString(), + innerResponse.getHits().getTotalHits().value, + Matchers.equalTo(bucket.getDocCount()) + ) + ); + } + }); } public void testKnownIssueWithCellIntersectingPolygonAndBoundingBox() throws IOException { @@ -166,17 +171,17 @@ public void testKnownIssueWithCellIntersectingPolygonAndBoundingBox() throws IOE .precision(precision) .setGeoBoundingBox(boundingBox) .size(256 * 256); - SearchResponse response = client().prepareSearch("test").addAggregation(builderPoint).setSize(0).get(); - InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); - for (InternalGeoGridBucket bucket : gridPoint.getBuckets()) { - assertThat(bucket.getDocCount(), Matchers.greaterThan(0L)); - QueryBuilder queryBuilder = new GeoGridQueryBuilder("geometry").setGridId( - GeoGridQueryBuilder.Grid.GEOHEX, - bucket.getKeyAsString() - ); - response = client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder).get(); - 
assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(bucket.getDocCount())); - } + assertResponse(client().prepareSearch("test").addAggregation(builderPoint).setSize(0), response -> { + InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); + for (InternalGeoGridBucket bucket : gridPoint.getBuckets()) { + assertThat(bucket.getDocCount(), Matchers.greaterThan(0L)); + QueryBuilder queryBuilder = new GeoGridQueryBuilder("geometry").setGridId( + GeoGridQueryBuilder.Grid.GEOHEX, + bucket.getKeyAsString() + ); + assertHitCount(client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), bucket.getDocCount()); + } + }); } private void doTestGeohashGrid(String fieldType, Supplier randomGeometriesSupplier) throws IOException { @@ -269,9 +274,11 @@ private void doTestGrid( for (int i = minPrecision; i <= maxPrecision; i++) { GeoGridAggregationBuilder builderPoint = aggBuilder.apply("geometry").field("geometry").precision(i); - SearchResponse response = client().prepareSearch("test").addAggregation(builderPoint).setSize(0).get(); - InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); - assertQuery(gridPoint.getBuckets(), queryBuilder, i); + int finalI = i; + assertResponse(client().prepareSearch("test").addAggregation(builderPoint).setSize(0), response -> { + InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); + assertQuery(gridPoint.getBuckets(), queryBuilder, finalI); + }); } builder = client().prepareBulk(); @@ -297,9 +304,11 @@ private void doTestGrid( .precision(i) .setGeoBoundingBox(boundingBox) .size(256 * 256); - SearchResponse response = client().prepareSearch("test").addAggregation(builderPoint).setSize(0).get(); - InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); - assertQuery(gridPoint.getBuckets(), queryBuilder, i); + int finalI = i; + assertResponse(client().prepareSearch("test").addAggregation(builderPoint).setSize(0), response -> { + InternalGeoGrid gridPoint = response.getAggregations().get("geometry"); + assertQuery(gridPoint.getBuckets(), queryBuilder, finalI); + }); } } @@ -307,11 +316,13 @@ private void assertQuery(List buckets, BiFunction assertThat( + "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), + response.getHits().getTotalHits().value, + Matchers.equalTo(bucket.getDocCount()) + ) ); } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java index 13a4de7f97fa4..57e654fc0901c 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeScriptDocValuesIT.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.spatial.search; import org.apache.lucene.geo.Circle; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.geo.BoundingBox; import org.elasticsearch.common.geo.GeoPoint; @@ -51,7 +51,8 @@ import java.util.function.Function; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import 
static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertCheckedResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -259,37 +260,36 @@ private void doTestGeometry(Geometry geometry, GeoShapeValues.GeoShapeValue expe GeoShapeValues.GeoShapeValue value = GeoTestUtils.geoShapeValue(geometry); - SearchResponse searchResponse = client().prepareSearch() + SearchRequestBuilder searchRequest = client().prepareSearch() .addStoredField("_source") .addScriptField("lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lat", Collections.emptyMap())) .addScriptField("lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lon", Collections.emptyMap())) .addScriptField("height", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "height", Collections.emptyMap())) .addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap())) .addScriptField("label_lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lat", Collections.emptyMap())) - .addScriptField("label_lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lon", Collections.emptyMap())) - .get(); - assertSearchResponse(searchResponse); - Map fields = searchResponse.getHits().getHits()[0].getFields(); - assertThat(fields.get("lat").getValue(), equalTo(value.getY())); - assertThat(fields.get("lon").getValue(), equalTo(value.getX())); - assertThat(fields.get("height").getValue(), equalTo(value.boundingBox().maxY() - value.boundingBox().minY())); - assertThat(fields.get("width").getValue(), equalTo(value.boundingBox().maxX() - value.boundingBox().minX())); - - // Check label position is in the geometry, but with a tolerance constructed as a circle of 1m radius to handle quantization - Point labelPosition = new Point(fields.get("label_lon").getValue(), fields.get("label_lat").getValue()); - Circle tolerance = new Circle(labelPosition.getY(), labelPosition.getX(), 1); - assertTrue( - "Expect label position " + labelPosition + " to intersect geometry " + geometry, - value.relate(tolerance) != GeoRelation.QUERY_DISJOINT - ); - - // Check that the label position is the expected one, or the centroid in certain polygon cases - if (expectedLabelPosition != null) { - doTestLabelPosition(fields, expectedLabelPosition); - } else if (fallbackToCentroid && value.dimensionalShapeType() == DimensionalShapeType.POLYGON) { - // Use the centroid for all polygons, unless overwritten for specific cases - doTestLabelPosition(fields, GeoTestUtils.geoShapeValue(new Point(value.getX(), value.getY()))); - } + .addScriptField("label_lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "label_lon", Collections.emptyMap())); + + assertCheckedResponse(searchRequest, response -> { + Map fields = response.getHits().getHits()[0].getFields(); + assertThat(fields.get("lat").getValue(), equalTo(value.getY())); + assertThat(fields.get("lon").getValue(), equalTo(value.getX())); + assertThat(fields.get("height").getValue(), equalTo(value.boundingBox().maxY() - value.boundingBox().minY())); + assertThat(fields.get("width").getValue(), equalTo(value.boundingBox().maxX() - value.boundingBox().minX())); + // Check label position is in the geometry, but with a tolerance constructed as a circle of 1m radius to handle quantization + Point labelPosition = 
new Point(fields.get("label_lon").getValue(), fields.get("label_lat").getValue()); + Circle tolerance = new Circle(labelPosition.getY(), labelPosition.getX(), 1); + assertTrue( + "Expect label position " + labelPosition + " to intersect geometry " + geometry, + value.relate(tolerance) != GeoRelation.QUERY_DISJOINT + ); + // Check that the label position is the expected one, or the centroid in certain polygon cases + if (expectedLabelPosition != null) { + doTestLabelPosition(fields, expectedLabelPosition); + } else if (fallbackToCentroid && value.dimensionalShapeType() == DimensionalShapeType.POLYGON) { + // Use the centroid for all polygons, unless overwritten for specific cases + doTestLabelPosition(fields, GeoTestUtils.geoShapeValue(new Point(value.getX(), value.getY()))); + } + }); } private void doTestLabelPosition(Map fields, GeoShapeValues.GeoShapeValue expectedLabelPosition) @@ -316,18 +316,18 @@ public void testNullShape() throws Exception { indicesAdmin().prepareRefresh("test").get(); - SearchResponse searchResponse = client().prepareSearch() + SearchRequestBuilder searchRequestBuilder = client().prepareSearch() .addStoredField("_source") .addScriptField("lat", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lat", Collections.emptyMap())) .addScriptField("lon", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "lon", Collections.emptyMap())) .addScriptField("height", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "height", Collections.emptyMap())) - .addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap())) - .get(); - assertSearchResponse(searchResponse); - Map fields = searchResponse.getHits().getHits()[0].getFields(); - assertThat(fields.get("lat").getValue(), equalTo(Double.NaN)); - assertThat(fields.get("lon").getValue(), equalTo(Double.NaN)); - assertThat(fields.get("height").getValue(), equalTo(Double.NaN)); - assertThat(fields.get("width").getValue(), equalTo(Double.NaN)); + .addScriptField("width", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "width", Collections.emptyMap())); + assertNoFailuresAndResponse(searchRequestBuilder, response -> { + Map fields = response.getHits().getHits()[0].getFields(); + assertThat(fields.get("lat").getValue(), equalTo(Double.NaN)); + assertThat(fields.get("lon").getValue(), equalTo(Double.NaN)); + assertThat(fields.get("height").getValue(), equalTo(Double.NaN)); + assertThat(fields.get("width").getValue(), equalTo(Double.NaN)); + }); } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index bbc685e7bde30..e354feb60c95f 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.spatial.search; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.geometry.Geometry; @@ -18,6 +17,7 @@ import org.elasticsearch.geometry.utils.StandardValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.IndexVersion; 
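+// IndexVersions, imported on the next line, is the new holder for the named index version constants (for example
+// IndexVersions.V_8_0_0); IndexVersion itself remains the value type that index creation versions are compared against.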
+import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.percolator.PercolateQueryBuilder; import org.elasticsearch.percolator.PercolatorPlugin; @@ -40,6 +40,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -70,9 +71,7 @@ protected boolean allowExpensiveQueries() { public void testMappingUpdate() { // create index IndexVersion version = randomSupportedVersion(); - assertAcked( - indicesAdmin().prepareCreate("test").setSettings(settings(version).build()).setMapping("shape", "type=geo_shape").get() - ); + assertAcked(indicesAdmin().prepareCreate("test").setSettings(settings(version).build()).setMapping("shape", "type=geo_shape")); ensureGreen(); String update = """ @@ -85,7 +84,7 @@ public void testMappingUpdate() { } }"""; - if (version.before(IndexVersion.V_8_0_0)) { + if (version.before(IndexVersions.V_8_0_0)) { IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> indicesAdmin().preparePutMapping("test").setSource(update, XContentType.JSON).get() @@ -149,14 +148,15 @@ public void testPercolatorGeoQueries() throws Exception { refresh(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "POINT(4.51 52.20)").endObject()); - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) - .addSort("id", SortOrder.ASC) - .get(); - assertHitCount(response, 3); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + assertNoFailuresAndResponse( + client().prepareSearch().setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)).addSort("id", SortOrder.ASC), + response -> { + assertHitCount(response, 3); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + } + ); } // make sure we store the normalised geometry @@ -177,18 +177,19 @@ public void testStorePolygonDateLine() throws Exception { indexRandom(true, client().prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); - SearchResponse searchResponse = client().prepareSearch("test").setFetchSource(false).addStoredField("shape").get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - SearchHit searchHit = searchResponse.getHits().getAt(0); - assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); - BytesRef bytesRef = searchHit.field("shape").getValue(); - Geometry geometry = WellKnownBinary.fromWKB( - StandardValidator.instance(true), - false, - bytesRef.bytes, - bytesRef.offset, - bytesRef.length - ); - assertThat(geometry.type(), equalTo(ShapeType.MULTIPOLYGON)); + assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { + 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + SearchHit searchHit = response.getHits().getAt(0); + assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); + BytesRef bytesRef = searchHit.field("shape").getValue(); + Geometry geometry = WellKnownBinary.fromWKB( + StandardValidator.instance(true), + false, + bytesRef.bytes, + bytesRef.offset, + bytesRef.length + ); + assertThat(geometry.type(), equalTo(ShapeType.MULTIPOLYGON)); + }); } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java index 4a5b46a2d1ed3..562f2fd681d97 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/LegacyGeoShapeWithDocValuesIT.java @@ -7,9 +7,9 @@ package org.elasticsearch.xpack.spatial.search; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.geometry.Circle; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.geo.GeoShapeIntegTestCase; import org.elasticsearch.test.index.IndexVersionUtils; @@ -24,8 +24,8 @@ import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; public class LegacyGeoShapeWithDocValuesIT extends GeoShapeIntegTestCase { @@ -43,7 +43,7 @@ protected void getGeoShapeMapping(XContentBuilder b) throws IOException { @Override protected IndexVersion randomSupportedVersion() { // legacy shapes can only be created in version lower than 8.x - return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + return IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); } @Override @@ -57,7 +57,6 @@ public void testMappingUpdate() { indicesAdmin().prepareCreate("test") .setSettings(settings(randomSupportedVersion()).build()) .setMapping("shape", "type=geo_shape,strategy=recursive") - .get() ); ensureGreen(); @@ -85,7 +84,6 @@ public void testLegacyCircle() throws Exception { assertAcked( prepareCreate("test").setSettings(settings(randomSupportedVersion()).build()) .setMapping("shape", "type=geo_shape,strategy=recursive,tree=geohash") - .get() ); ensureGreen(); @@ -102,7 +100,6 @@ public void testLegacyCircle() throws Exception { })); // test self crossing of circles - SearchResponse searchResponse = client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertHitCount(client().prepareSearch("test").setQuery(geoShapeQuery("shape", new Circle(30, 50, 77000))), 1L); } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 8903904eeefd7..0abf475e59048 100644 --- 
a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.settings.Settings; @@ -35,10 +34,10 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; public class ShapeQueryOverShapeTests extends ShapeQueryTestCase { @@ -74,7 +73,6 @@ public void setUp() throws Exception { assertAcked( indicesAdmin().prepareCreate(IGNORE_MALFORMED_INDEX) .setMapping(FIELD, "type=shape,ignore_malformed=true", "_source", "enabled=false") - .get() ); ensureGreen(); @@ -175,45 +173,29 @@ public void testShapeFetchingPath() throws Exception { ShapeQueryBuilder filter = new ShapeQueryBuilder("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex(indexName) .indexedShapePath("location"); - SearchResponse result = client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter), 1L); filter = new ShapeQueryBuilder("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex(indexName) .indexedShapePath("1.location"); - result = client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter), 1L); filter = new ShapeQueryBuilder("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex(indexName) .indexedShapePath("1.2.location"); - result = client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter), 1L); filter = new ShapeQueryBuilder("location", "1").relation(ShapeRelation.INTERSECTS) .indexedShapeIndex(indexName) .indexedShapePath("1.2.3.location"); - result = client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + 
assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(QueryBuilders.matchAllQuery()).setPostFilter(filter), 1L); // now test the query variant ShapeQueryBuilder query = new ShapeQueryBuilder("location", "1").indexedShapeIndex(indexName).indexedShapePath("location"); - result = client().prepareSearch(searchIndex).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(query), 1L); query = new ShapeQueryBuilder("location", "1").indexedShapeIndex(indexName).indexedShapePath("1.location"); - result = client().prepareSearch(searchIndex).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(query), 1L); query = new ShapeQueryBuilder("location", "1").indexedShapeIndex(indexName).indexedShapePath("1.2.location"); - result = client().prepareSearch(searchIndex).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(query), 1L); query = new ShapeQueryBuilder("location", "1").indexedShapeIndex(indexName).indexedShapePath("1.2.3.location"); - result = client().prepareSearch(searchIndex).setQuery(query).get(); - assertSearchResponse(result); - assertHitCount(result, 1); + assertHitCountAndNoFailures(client().prepareSearch(searchIndex).setQuery(query), 1L); } /** @@ -239,11 +221,10 @@ public void testIndexShapeRouting() { client().prepareIndex(INDEX).setId("0").setSource(source, XContentType.JSON).setRouting("ABC").get(); indicesAdmin().prepareRefresh(INDEX).get(); - SearchResponse searchResponse = client().prepareSearch(INDEX) - .setQuery(new ShapeQueryBuilder(FIELD, "0").indexedShapeIndex(INDEX).indexedShapeRouting("ABC")) - .get(); - - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs + 1)); + assertHitCount( + client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder(FIELD, "0").indexedShapeIndex(INDEX).indexedShapeRouting("ABC")), + (long) numDocs + 1 + ); } public void testNullShape() { @@ -264,16 +245,16 @@ public void testNullShape() { public void testExistsQuery() { ExistsQueryBuilder eqb = QueryBuilders.existsQuery(FIELD); - SearchResponse result = client().prepareSearch(INDEX).setQuery(eqb).get(); - assertSearchResponse(result); - assertHitCount(result, numDocs); + assertHitCountAndNoFailures(client().prepareSearch(INDEX).setQuery(eqb), numDocs); } public void testFieldAlias() { - SearchResponse response = client().prepareSearch(INDEX) - .setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)) - .get(); - assertTrue(response.getHits().getTotalHits().value > 0); + assertResponse( + client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), + response -> { + assertTrue(response.getHits().getTotalHits().value > 0); + } + ); } public void testContainsShapeQuery() { @@ -287,10 +268,7 @@ public void testContainsShapeQuery() { // index the mbr of the collection Rectangle rectangle = new Rectangle(-50, 50, 50, -50); ShapeQueryBuilder queryBuilder = new ShapeQueryBuilder("location", rectangle).relation(ShapeRelation.CONTAINS); - SearchResponse response = client().prepareSearch("test_contains").setQuery(queryBuilder).get(); - assertSearchResponse(response); - - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + 
assertHitCountAndNoFailures(client().prepareSearch("test_contains").setQuery(queryBuilder), 1L); } public void testGeometryCollectionRelations() throws IOException { @@ -318,51 +296,60 @@ public void testGeometryCollectionRelations() throws IOException { { // A geometry collection that is fully within the indexed shape GeometryCollection collection = new GeometryCollection<>(List.of(new Point(1, 2), new Point(-2, -1))); - SearchResponse response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.CONTAINS)), + 1L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.INTERSECTS)), + 1L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.DISJOINT)), + 0L + ); } { // A geometry collection (as multi point) that is partially within the indexed shape MultiPoint multiPoint = new MultiPoint(List.of(new Point(1, 2), new Point(20, 30))); - SearchResponse response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.CONTAINS)), + 0L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.INTERSECTS)), + 1L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", multiPoint).relation(ShapeRelation.DISJOINT)), + 0L + ); } { // A geometry collection that is disjoint with the indexed shape MultiPoint multiPoint = new MultiPoint(List.of(new Point(-20, -30), new Point(20, 30))); GeometryCollection collection = new GeometryCollection<>(List.of(multiPoint)); - SearchResponse response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", 
collection).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_collections") - .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.CONTAINS)), + 0L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.INTERSECTS)), + 0L + ); + assertHitCount( + client().prepareSearch("test_collections") + .setQuery(new ShapeQueryBuilder("geometry", collection).relation(ShapeRelation.DISJOINT)), + 1L + ); } } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1aad094bfe5b9..1160af2a98071 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -8,7 +8,6 @@ import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.geometry.Circle; @@ -32,7 +31,9 @@ import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCountAndNoFailures; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -83,22 +84,26 @@ public void testIndexPointsFilterRectangle() throws Exception { Rectangle rectangle = new Rectangle(-45, 45, 45, -45); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); // default query, without specifying relation (expect intersects) - searchResponse = client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)).get(); - assertSearchResponse(searchResponse); - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsCircle() throws Exception { @@ -120,14 +125,15 @@ public void testIndexPointsCircle() throws Exception { Circle circle = new Circle(-30, -30, 1); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsPolygon() throws Exception { @@ -149,14 +155,15 @@ public void testIndexPointsPolygon() throws Exception { Polygon polygon = new Polygon(new LinearRing(new double[] { -35, -35, -25, -25, -35 }, new double[] { -35, -25, -25, -35, -35 })); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertSearchResponse(searchResponse); - SearchHits searchHits = searchResponse.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); - assertThat(searchHits.getAt(0).getId(), equalTo("1")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), + response -> { + SearchHits searchHits = response.getHits(); + assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getAt(0).getId(), equalTo("1")); + } + ); } public void testIndexPointsMultiPolygon() throws Exception { @@ -191,15 +198,16 @@ public void testIndexPointsMultiPolygon() throws Exception { MultiPolygon mp = new MultiPolygon(List.of(encloseDocument1Shape, encloseDocument2Shape)); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(2)); - assertThat(searchResponse.getHits().getAt(0).getId(), not(equalTo("2"))); - assertThat(searchResponse.getHits().getAt(1).getId(), not(equalTo("2"))); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(new ShapeQueryBuilder(defaultFieldName, mp).relation(ShapeRelation.INTERSECTS)), + response -> { + 
assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getHits().length, equalTo(2)); + assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); + assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); + } + ); } public void testIndexPointsRectangle() throws Exception { @@ -221,14 +229,15 @@ public void testIndexPointsRectangle() throws Exception { Rectangle rectangle = new Rectangle(-50, -40, -45, -55); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)) - .get(); - - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("2")); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("2")); + } + ); } public void testIndexPointsIndexedRectangle() throws Exception { @@ -275,28 +284,29 @@ public void testIndexPointsIndexedRectangle() throws Exception { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - new ShapeQueryBuilder(defaultFieldName, "shape1").relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); + assertNoFailuresAndResponse( + client().prepareSearch(defaultIndexName) + .setQuery( + new ShapeQueryBuilder(defaultFieldName, "shape1").relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + response -> { + assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getHits().length, equalTo(1)); + assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); + } + ); - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); - assertThat(searchResponse.getHits().getHits().length, equalTo(1)); - assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("point2")); - - searchResponse = client().prepareSearch(defaultIndexName) - .setQuery( - new ShapeQueryBuilder(defaultFieldName, "shape2").relation(ShapeRelation.INTERSECTS) - .indexedShapeIndex(indexedShapeIndex) - .indexedShapePath(indexedShapePath) - ) - .get(); - assertSearchResponse(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertHitCountAndNoFailures( + client().prepareSearch(defaultIndexName) + .setQuery( + new ShapeQueryBuilder(defaultFieldName, "shape2").relation(ShapeRelation.INTERSECTS) + .indexedShapeIndex(indexedShapeIndex) + .indexedShapePath(indexedShapePath) + ), + 0L + ); } public void testDistanceQuery() throws Exception { @@ -326,21 +336,24 @@ public void testDistanceQuery() throws Exception { ).setRefreshPolicy(IMMEDIATE) ).actionGet(); - SearchResponse response = client().prepareSearch("test_distance") - .setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.WITHIN)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = 
client().prepareSearch("test_distance") - .setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.INTERSECTS)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_distance") - .setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.DISJOINT)) - .get(); - assertEquals(2, response.getHits().getTotalHits().value); - response = client().prepareSearch("test_distance") - .setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.CONTAINS)) - .get(); - assertEquals(0, response.getHits().getTotalHits().value); + assertHitCount( + client().prepareSearch("test_distance").setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.WITHIN)), + 2L + ); + + assertHitCount( + client().prepareSearch("test_distance").setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.INTERSECTS)), + 2L + ); + + assertHitCount( + client().prepareSearch("test_distance").setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.DISJOINT)), + 2L + ); + + assertHitCount( + client().prepareSearch("test_distance").setQuery(new ShapeQueryBuilder("location", circle).relation(ShapeRelation.CONTAINS)), + 0L + ); } } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/SpatialQueryStringIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/SpatialQueryStringIT.java index 6ff882f97e319..04f260fa1f1c6 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/SpatialQueryStringIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/SpatialQueryStringIT.java @@ -63,32 +63,26 @@ public void testBasicAllQuery() throws Exception { ); // nothing matches indexRandom(true, false, reqs); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("foo")), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("foo")), 0L); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("\"2015/09/02\"")), 0L); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("127.0.0.1 OR 1.8")), 0L); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)")), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)")), 0L); Exception e = expectThrows( Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("geo_shape")).get() + () -> prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("geo_shape")).get() ); assertThat(e.getCause().getMessage(), containsString("Field [geo_shape] of type [geo_shape] does not support match queries")); - e = expectThrows( - Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("shape")).get() - ); + e = expectThrows(Exception.class, () -> prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("shape")).get()); assertThat(e.getCause().getMessage(), containsString("Field [shape] of type [shape] does not support match queries")); - e = expectThrows( - Exception.class, - () -> client().prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("point")).get() - 
); + e = expectThrows(Exception.class, () -> prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("point")).get()); assertThat(e.getCause().getMessage(), containsString("Field [point] of type [point] does not support match queries")); - assertHitCount(client().prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("*shape")), 0L); + assertHitCount(prepareSearch("test").setQuery(queryStringQuery("POINT(0 0)").field("*shape")), 0L); } } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java index 3e904f59ad44e..b07b6da96833f 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.geometry.utils.GeometryValidator; import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -138,7 +139,7 @@ public Builder( this.geoFormatterFactory = geoFormatterFactory; this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersion.V_7_8_0.onOrBefore(version)); + this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersions.V_7_8_0.onOrBefore(version)); addScriptValidation(script, indexed, hasDocValues); } @@ -266,7 +267,7 @@ public String typeName() { public Query geoShapeQuery(SearchExecutionContext context, String fieldName, ShapeRelation relation, LatLonGeometry... geometries) { failIfNotIndexedNorDocValuesFallback(context); // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0) - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersion.V_7_5_0)) { + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { throw new QueryShardException( context, ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]." 
@@ -335,7 +336,7 @@ public Mapper.Builder parse(String name, Map node, MappingParser boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(parserContext.getSettings()); boolean coerceByDefault = COERCE_SETTING.get(parserContext.getSettings()); if (LegacyGeoShapeFieldMapper.containsDeprecatedParameter(node.keySet())) { - if (parserContext.indexVersionCreated().onOrAfter(IndexVersion.V_8_0_0)) { + if (parserContext.indexVersionCreated().onOrAfter(IndexVersions.V_8_0_0)) { Set deprecatedParams = LegacyGeoShapeFieldMapper.getDeprecatedParameters(node.keySet()); throw new IllegalArgumentException( "using deprecated parameters " diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java index 838fd56cfc11a..21c4a1f97c3ef 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapper.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -91,7 +92,7 @@ public Builder(String name, IndexVersion version, boolean ignoreMalformedByDefau this.version = version; this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault); this.coerce = coerceParam(m -> builder(m).coerce.get(), coerceByDefault); - this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersion.V_8_4_0.onOrBefore(version)); + this.hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), IndexVersions.V_8_4_0.onOrBefore(version)); } @Override diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java index 7b28a57759c3d..ac526e6016b23 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryProcessor.java @@ -24,7 +24,7 @@ import org.elasticsearch.geometry.Point; import org.elasticsearch.geometry.Polygon; import org.elasticsearch.geometry.Rectangle; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.SearchExecutionContext; @@ -46,7 +46,7 @@ public Query shapeQuery( ) { validateIsShapeFieldType(fieldName, context); // CONTAINS queries are not supported by VECTOR strategy for indices created before version 7.5.0 (Lucene 8.3.0); - if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersion.V_7_5_0)) { + if (relation == ShapeRelation.CONTAINS && context.indexVersionCreated().before(IndexVersions.V_7_5_0)) { throw new QueryShardException(context, ShapeRelation.CONTAINS + " query relation not supported for Field [" + fieldName + "]."); } if (shape == null) 
{ diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTiler.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTiler.java index 240d3a4c32ee7..05a7ec36db9f4 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTiler.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexGridTiler.java @@ -240,7 +240,7 @@ static class BoundedGeoHexGridTiler extends GeoHexGridTiler { private final GeoBoundingBox bbox; private final GeoHexVisitor visitor; private final int resolution; - private static final double FACTOR = 0.36; + private static final double FACTOR = 0.37; BoundedGeoHexGridTiler(int resolution, GeoBoundingBox bbox) { super(resolution); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianPointValuesSourceType.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianPointValuesSourceType.java index d1878dbba2856..29fdaa3c0cfd2 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianPointValuesSourceType.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianPointValuesSourceType.java @@ -16,7 +16,6 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.MissingValues; import org.elasticsearch.search.aggregations.support.ValueType; @@ -26,6 +25,7 @@ import org.elasticsearch.xpack.spatial.index.fielddata.IndexCartesianPointFieldData; import java.io.IOException; +import java.util.function.LongSupplier; public class CartesianPointValuesSourceType implements Writeable, ValuesSourceType { @@ -61,7 +61,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { // TODO: also support the structured formats of points final CartesianPointValuesSource pointValuesSource = (CartesianPointValuesSource) valuesSource; diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianShapeValuesSourceType.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianShapeValuesSourceType.java index 5ec5cfc36b8b3..1b64eb7692fed 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianShapeValuesSourceType.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/CartesianShapeValuesSourceType.java @@ -12,7 +12,6 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import 
org.elasticsearch.search.aggregations.support.MissingValues; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -23,6 +22,7 @@ import org.elasticsearch.xpack.spatial.index.fielddata.plain.CartesianShapeIndexFieldData; import java.io.IOException; +import java.util.function.LongSupplier; public class CartesianShapeValuesSourceType extends ShapeValuesSourceType { @@ -61,7 +61,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { CartesianShapeValuesSource shapeValuesSource = (CartesianShapeValuesSource) valuesSource; final CartesianShapeValues.CartesianShapeValue missing = CartesianShapeValues.EMPTY.missing(rawMissing.toString()); diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java index 56857605999c8..8286eab085c70 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/search/aggregations/support/GeoShapeValuesSourceType.java @@ -13,7 +13,6 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.FieldContext; import org.elasticsearch.search.aggregations.support.MissingValues; import org.elasticsearch.search.aggregations.support.ValuesSource; @@ -22,6 +21,7 @@ import org.elasticsearch.xpack.spatial.index.fielddata.IndexShapeFieldData; import java.io.IOException; +import java.util.function.LongSupplier; public class GeoShapeValuesSourceType extends ShapeValuesSourceType { @@ -61,7 +61,7 @@ public ValuesSource replaceMissing( ValuesSource valuesSource, Object rawMissing, DocValueFormat docValueFormat, - AggregationContext context + LongSupplier nowInMillis ) { GeoShapeValuesSource shapeValuesSource = (GeoShapeValuesSource) valuesSource; final GeoShapeValues.GeoShapeValue missing = GeoShapeValues.EMPTY.missing(rawMissing.toString()); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index a6958f889e78e..0ddb38ea500f1 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType; @@ -85,7 +86,7 @@ public void testDefaultConfiguration() throws IOException { } public void testDefaultDocValueConfigurationOnPre7_8() 
throws IOException { - IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_7_7_0); + IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_7_7_0); DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); assertThat(fieldMapper, instanceOf(fieldMapperClass())); @@ -283,7 +284,7 @@ public void testInvalidCurrentVersion() { } public void testGeoShapeLegacyMerge() throws Exception { - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); MapperService m = createMapperService(version, fieldMapping(b -> b.field("type", getFieldName()))); Exception e = expectThrows( IllegalArgumentException.class, diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java index 5554ff7152b82..26d349a7ee5a6 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldMapperTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.Orientation; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.AbstractGeometryFieldMapper; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper; import org.elasticsearch.index.mapper.AbstractShapeGeometryFieldMapper.AbstractShapeGeometryFieldType; @@ -107,7 +108,7 @@ public void testDefaultConfiguration() throws IOException { public void testDefaultDocValueConfigurationOnPre8_4() throws IOException { // TODO verify which version this test is actually valid for (when PR is actually merged) - IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersion.V_7_0_0, IndexVersion.V_8_3_0); + IndexVersion oldVersion = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.V_7_0_0, IndexVersions.V_8_3_0); DocumentMapper defaultMapper = createDocumentMapper(oldVersion, fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper(FIELD_NAME); assertThat(fieldMapper, instanceOf(fieldMapperClass())); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java index 173ddced0817c..593656411eb41 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/GeoShapeQueryBuilderGeoShapeTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import 
org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; @@ -88,7 +88,7 @@ protected GeoShapeQueryBuilder doCreateTestQueryBuilder(boolean indexedShape) { } if (ESTestCase.randomBoolean()) { SearchExecutionContext context = AbstractBuilderTestCase.createSearchExecutionContext(); - if (context.indexVersionCreated().onOrAfter(IndexVersion.V_7_5_0)) { // CONTAINS is only supported from version 7.5 + if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 if (shapeType == ShapeType.LINESTRING || shapeType == ShapeType.MULTILINESTRING) { builder.relation(ESTestCase.randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS)); } else { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java index 5dffac26be460..f560c8591ac56 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/LegacyGeoShapeWithDocValuesQueryTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.spatial.index.query; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoJson; import org.elasticsearch.common.settings.Settings; @@ -16,6 +15,7 @@ import org.elasticsearch.geometry.MultiPoint; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DocumentParsingException; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.legacygeo.mapper.LegacyGeoShapeFieldMapper; @@ -34,6 +34,7 @@ import static org.elasticsearch.index.query.QueryBuilders.geoIntersectionQuery; import static org.elasticsearch.index.query.QueryBuilders.geoShapeQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; @@ -70,7 +71,7 @@ protected void createMapping(String indexName, String fieldName, Settings settin ex.getMessage(), containsString("using deprecated parameters [tree] in mapper [" + fieldName + "] of type [geo_shape] is no longer allowed") ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); finalSetting = settings(version).put(settings).build(); indicesAdmin().prepareCreate(indexName).setMapping(xcb).setSettings(finalSetting).get(); } @@ -108,7 +109,7 @@ public void testPointsOnlyExplicit() throws Exception { ) ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); Settings settings = settings(version).build(); indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).setSettings(settings).get(); ensureGreen(); @@ -130,9 +131,7 @@ public void 
testPointsOnlyExplicit() throws Exception { .get(); // test that point was inserted - SearchResponse response = client().prepareSearch("geo_points_only").setQuery(matchAllQuery()).get(); - - assertEquals(2, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch("geo_points_only").setQuery(matchAllQuery()), 2L); } public void testPointsOnly() throws Exception { @@ -163,7 +162,7 @@ public void testPointsOnly() throws Exception { ) ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); Settings settings = settings(version).build(); indicesAdmin().prepareCreate("geo_points_only").setMapping(mapping).setSettings(settings).get(); ensureGreen(); @@ -182,10 +181,7 @@ public void testPointsOnly() throws Exception { } // test that point was inserted - SearchResponse response = client().prepareSearch("geo_points_only") - .setQuery(geoIntersectionQuery(defaultFieldName, geometry)) - .get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch("geo_points_only").setQuery(geoIntersectionQuery(defaultFieldName, geometry)), 1L); } public void testFieldAlias() throws IOException { @@ -214,7 +210,7 @@ public void testFieldAlias() throws IOException { containsString("using deprecated parameters [tree] in mapper [geo] of type [geo_shape] is no longer allowed") ); - IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersion.V_8_0_0); + IndexVersion version = IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0); Settings settings = settings(version).build(); indicesAdmin().prepareCreate(defaultIndexName).setMapping(mapping).setSettings(settings).get(); ensureGreen(); @@ -226,8 +222,7 @@ public void testFieldAlias() throws IOException { .setRefreshPolicy(IMMEDIATE) .get(); - SearchResponse response = client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", multiPoint)).get(); - assertEquals(1, response.getHits().getTotalHits().value); + assertHitCount(client().prepareSearch(defaultIndexName).setQuery(geoShapeQuery("alias", multiPoint)), 1L); } @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/86118") diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java index c3fd1288318d4..f890947698a97 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilderOverShapeTests.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.geometry.Geometry; import org.elasticsearch.geometry.ShapeType; -import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.xpack.spatial.util.ShapeTestUtils; @@ -33,7 +33,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws @Override protected ShapeRelation getShapeRelation(ShapeType type) { SearchExecutionContext context = createSearchExecutionContext(); - if 
(context.indexVersionCreated().onOrAfter(IndexVersion.V_7_5_0)) { // CONTAINS is only supported from version 7.5 + if (context.indexVersionCreated().onOrAfter(IndexVersions.V_7_5_0)) { // CONTAINS is only supported from version 7.5 if (type == ShapeType.LINESTRING || type == ShapeType.MULTILINESTRING) { return randomFrom(ShapeRelation.DISJOINT, ShapeRelation.INTERSECTS, ShapeRelation.CONTAINS); } else { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java index 4aa2340eef3b6..3f75b9830a96d 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregatorTests.java @@ -72,7 +72,7 @@ protected Point randomPoint() { } @Override - protected GeoBoundingBox randomBBox() { + protected GeoBoundingBox randomBBox(int precision) { GeoBoundingBox bbox = randomValueOtherThanMany( (b) -> b.top() > GeoTileUtils.LATITUDE_MASK || b.bottom() < -GeoTileUtils.LATITUDE_MASK, () -> { diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle index 13bf18e6c080d..412dec62f81f8 100644 --- a/x-pack/plugin/sql/qa/mixed-node/build.gradle +++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle @@ -50,8 +50,11 @@ BuildParams.bwcVersions.withWireCompatible(v -> v.onOrAfter("7.10.3") && nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c->c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) - } - onlyIf("BWC tests disabled") { project.bwc_tests_enabled } + } + systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') + systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') + + onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } tasks.register(bwcTaskName(bwcVersion)) { diff --git a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java index 5bc7e0dd219ca..bb9f707a7f61e 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java @@ -38,6 +38,8 @@ public class SqlCompatIT extends BaseRestSqlTestCase { + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); + private static RestClient newNodesClient; private static RestClient oldNodesClient; private static TransportVersion bwcVersion; @@ -47,7 +49,7 @@ public void initBwcClients() throws IOException { if (newNodesClient == null) { assertNull(oldNodesClient); - TestNodes nodes = buildNodeAndVersions(client()); + TestNodes nodes = buildNodeAndVersions(client(), BWC_NODES_VERSION); bwcVersion = nodes.getBWCTransportVersion(); newNodesClient = buildClient( restClientSettings(), diff --git a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java 
b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java index acb78a2458c86..41ebd6adffd41 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/javaRestTest/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlSearchIT.java @@ -40,7 +40,12 @@ import static org.elasticsearch.xpack.ql.TestUtils.readResource; public class SqlSearchIT extends ESRestTestCase { - private static final Version VERSION_FIELD_QL_INTRODUCTION = Version.V_8_4_0; + + private static final String BWC_NODES_VERSION = System.getProperty("tests.bwc_nodes_version"); + + // TODO[lor]: replace this with a feature-based check when we have one + private static final boolean SUPPORTS_VERSION_FIELD_QL_INTRODUCTION = Version.fromString(BWC_NODES_VERSION).onOrAfter(Version.V_8_4_0); + private static final String index = "test_sql_mixed_versions"; private static int numShards; private static int numReplicas = 1; @@ -48,16 +53,14 @@ public class SqlSearchIT extends ESRestTestCase { private static TestNodes nodes; private static List newNodes; private static List bwcNodes; - private static Version bwcVersion; @Before public void createIndex() throws IOException { - nodes = buildNodeAndVersions(client()); + nodes = buildNodeAndVersions(client(), BWC_NODES_VERSION); numShards = nodes.size(); numDocs = randomIntBetween(numShards, 15); newNodes = new ArrayList<>(nodes.getNewNodes()); bwcNodes = new ArrayList<>(nodes.getBWCNodes()); - bwcVersion = nodes.getBWCNodes().get(0).version(); String mappings = readResource(SqlSearchIT.class.getResourceAsStream("/all_field_types.json")); createIndex( @@ -153,7 +156,7 @@ private Map prepareTestData( columns.add(columnInfo("scaled_float_field", "scaled_float")); columns.add(columnInfo("boolean_field", "boolean")); columns.add(columnInfo("ip_field", "ip")); - if (bwcVersion.onOrAfter(VERSION_FIELD_QL_INTRODUCTION)) { + if (SUPPORTS_VERSION_FIELD_QL_INTRODUCTION) { columns.add(columnInfo("version_field", "version")); } columns.add(columnInfo("text_field", "text")); @@ -187,7 +190,7 @@ private Map prepareTestData( builder.append("\"scaled_float_field\":" + fieldValues.computeIfAbsent("scaled_float_field", v -> 123.5d) + ","); builder.append("\"boolean_field\":" + fieldValues.computeIfAbsent("boolean_field", v -> randomBoolean()) + ","); builder.append("\"ip_field\":\"" + fieldValues.computeIfAbsent("ip_field", v -> "123.123.123.123") + "\","); - if (bwcVersion.onOrAfter(VERSION_FIELD_QL_INTRODUCTION)) { + if (SUPPORTS_VERSION_FIELD_QL_INTRODUCTION) { builder.append( "\"version_field\":\"" + fieldValues.computeIfAbsent("version_field", v -> randomInt() + "." + randomInt() + "."
+ randomInt()) diff --git a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle index b56d8ff211990..b42ae29e257f0 100644 --- a/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle +++ b/x-pack/plugin/sql/qa/server/multi-cluster-with-security/build.gradle @@ -1,4 +1,5 @@ import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' @@ -25,7 +26,7 @@ def javaRestTestClusterReg = testClusters.register('javaRestTest') { setting 'xpack.watcher.enabled', 'false' setting 'cluster.remote.my_remote_cluster.seeds', { remoteClusterReg.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE setting 'cluster.remote.connections_per_cluster', "1" setting 'xpack.security.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java index af14866011c05..7bffa67fe2a52 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/AsyncSqlSearchActionIT.java @@ -81,7 +81,6 @@ private void prepareIndex() throws Exception { assertAcked( indicesAdmin().prepareCreate("test") .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date", "i", "type=integer") - .get() ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/RestSqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/RestSqlCancellationIT.java index 52a756786f00e..0100634766bfe 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/RestSqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/RestSqlCancellationIT.java @@ -73,9 +73,7 @@ protected Collection> nodePlugins() { @TestLogging(value = "org.elasticsearch.xpack.sql:TRACE", reason = "debug") public void testRestCancellation() throws Exception { assertAcked( - indicesAdmin().prepareCreate("test") - .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") - .get() + indicesAdmin().prepareCreate("test").setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java index bd80543a26df3..aba659de53874 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java @@ -39,9 +39,7 @@ public void shutdownExec() { public void testCancellation() throws Exception { assertAcked( - indicesAdmin().prepareCreate("test") - .setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") - .get() + 
indicesAdmin().prepareCreate("test").setMapping("val", "type=integer", "event_type", "type=keyword", "@timestamp", "type=date") ); createIndex("idx_unmapped"); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 75888000384dd..43051e9e16160 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -9,6 +9,8 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.Scope; @@ -26,10 +28,12 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xpack.ql.util.LoggingUtils.logOnFailure; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_DELIMITER; @ServerlessScope(Scope.PUBLIC) public class RestSqlQueryAction extends BaseRestHandler { + private static final Logger LOGGER = LogManager.getLogger(RestSqlQueryAction.class); @Override public List routes() { @@ -52,7 +56,14 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli return channel -> { RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel()); - cancellableClient.execute(SqlQueryAction.INSTANCE, sqlRequest, new SqlResponseListener(channel, request, sqlRequest)); + cancellableClient.execute( + SqlQueryAction.INSTANCE, + sqlRequest, + new SqlResponseListener(channel, request, sqlRequest).delegateResponse((l, ex) -> { + logOnFailure(LOGGER, ex); + l.onFailure(ex); + }) + ); }; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java index c5f8eee643858..c6e0b5067ee08 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlPlugin.java @@ -11,30 +11,20 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestHandler; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -88,24 +78,13 @@ protected XPackLicenseState getLicenseState() { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - - return createComponents(client, environment.settings(), clusterService, namedWriteableRegistry); + public Collection createComponents(PluginServices services) { + return createComponents( + services.client(), + services.environment().settings(), + services.clusterService(), + services.namedWriteableRegistry() + ); } /** diff --git a/x-pack/plugin/stack/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml b/x-pack/plugin/stack/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml index 51d5edcb764f3..b1ac564a53715 100644 --- a/x-pack/plugin/stack/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml +++ b/x-pack/plugin/stack/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stack/10_basic.yml @@ -9,34 +9,58 @@ setup: - do: ilm.get_lifecycle: policy: "logs" + - do: + ilm.get_lifecycle: + policy: "logs@lifecycle" - do: ilm.get_lifecycle: policy: "metrics" + - do: + ilm.get_lifecycle: + policy: "metrics@lifecycle" - do: cluster.get_component_template: name: data-streams-mappings + - do: + cluster.get_component_template: + name: data-streams@mappings - do: cluster.get_component_template: name: logs-mappings + - do: + cluster.get_component_template: + name: logs@mappings - do: cluster.get_component_template: name: logs-settings + - do: + cluster.get_component_template: + name: logs@settings - do: cluster.get_component_template: name: metrics-mappings + - do: + cluster.get_component_template: + name: metrics@mappings - do: cluster.get_component_template: name: metrics-settings + - do: + cluster.get_component_template: + name: metrics@settings - do: cluster.get_component_template: name: metrics-tsdb-settings + - do: + cluster.get_component_template: + name: metrics@tsdb-settings - do: indices.get_index_template: diff --git a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java index 9832c789e2a99..ea3286e96160c 100644 --- a/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java +++ b/x-pack/plugin/stack/src/javaRestTest/java/org/elasticsearch/xpack/stack/EcsDynamicTemplatesIT.java @@ -53,7 +53,7 @@ public class EcsDynamicTemplatesIT extends ESRestTestCase { public static ElasticsearchCluster cluster = 
ElasticsearchCluster.local().module("mapper-extras").module("wildcard").build(); // The dynamic templates we test against - public static final String ECS_DYNAMIC_TEMPLATES_FILE = "ecs-dynamic-mappings.json"; + public static final String ECS_DYNAMIC_TEMPLATES_FILE = "ecs@mappings.json"; // The current ECS state (branch main) containing all fields in flattened form private static final String ECS_FLAT_FILE_URL = "https://raw.githubusercontent.com/elastic/ecs/main/generated/ecs/ecs_flat.yml"; diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java new file mode 100644 index 0000000000000..26bcfb66bf818 --- /dev/null +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/LegacyStackTemplateRegistry.java @@ -0,0 +1,267 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.stack; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; +import org.elasticsearch.xpack.core.template.IndexTemplateConfig; +import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; +import org.elasticsearch.xpack.core.template.IngestPipelineConfig; +import org.elasticsearch.xpack.core.template.LifecyclePolicyConfig; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.stack.StackTemplateRegistry.STACK_TEMPLATES_ENABLED; + +@Deprecated(since = "8.12.0", forRemoval = true) +public class LegacyStackTemplateRegistry extends IndexTemplateRegistry { + private static final Logger logger = LogManager.getLogger(LegacyStackTemplateRegistry.class); + + // Current version of the registry requires all nodes to be at least 8.9.0. + public static final Version MIN_NODE_VERSION = Version.V_8_9_0; + + // The stack template registry version. This number must be incremented when we make changes + // to built-in templates. 
+ public static final int REGISTRY_VERSION = 3; + + public static final String TEMPLATE_VERSION_VARIABLE = "xpack.stack.template.version"; + + private final ClusterService clusterService; + private volatile boolean stackTemplateEnabled; + + // General mappings conventions for any data that ends up in a data stream + public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams-mappings"; + + // ECS dynamic mappings + public static final String ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME = "ecs@dynamic_templates"; + + ////////////////////////////////////////////////////////// + // Built in ILM policies for users to use + ////////////////////////////////////////////////////////// + public static final String ILM_7_DAYS_POLICY_NAME = "7-days-default"; + public static final String ILM_30_DAYS_POLICY_NAME = "30-days-default"; + public static final String ILM_90_DAYS_POLICY_NAME = "90-days-default"; + public static final String ILM_180_DAYS_POLICY_NAME = "180-days-default"; + public static final String ILM_365_DAYS_POLICY_NAME = "365-days-default"; + + ////////////////////////////////////////////////////////// + // Logs components (for matching logs-*-* indices) + ////////////////////////////////////////////////////////// + public static final String LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "logs-mappings"; + public static final String LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME = "logs-settings"; + public static final String LOGS_ILM_POLICY_NAME = "logs"; + + ////////////////////////////////////////////////////////// + // Metrics components (for matching metric-*-* indices) + ////////////////////////////////////////////////////////// + public static final String METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "metrics-mappings"; + public static final String METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics-settings"; + public static final String METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics-tsdb-settings"; + public static final String METRICS_ILM_POLICY_NAME = "metrics"; + + ////////////////////////////////////////////////////////// + // Synthetics components (for matching synthetics-*-* indices) + ////////////////////////////////////////////////////////// + public static final String SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "synthetics-mappings"; + public static final String SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "synthetics-settings"; + public static final String SYNTHETICS_ILM_POLICY_NAME = "synthetics"; + + public LegacyStackTemplateRegistry( + Settings nodeSettings, + ClusterService clusterService, + ThreadPool threadPool, + Client client, + NamedXContentRegistry xContentRegistry + ) { + super(nodeSettings, clusterService, threadPool, client, xContentRegistry); + this.clusterService = clusterService; + this.stackTemplateEnabled = STACK_TEMPLATES_ENABLED.get(nodeSettings); + } + + @Override + public void initialize() { + super.initialize(); + clusterService.getClusterSettings().addSettingsUpdateConsumer(STACK_TEMPLATES_ENABLED, this::updateEnabledSetting); + } + + private void updateEnabledSetting(boolean newValue) { + if (newValue) { + this.stackTemplateEnabled = true; + } else { + logger.info( + "stack composable templates [{}] and component templates [{}] will not be installed or reinstalled", + String.join(",", getComposableTemplateConfigs().keySet()), + String.join(",", getComponentTemplateConfigs().keySet()) + ); + this.stackTemplateEnabled = false; + } + } + + private static final List LIFECYCLE_POLICY_CONFIGS = List.of( + new 
LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json"), + new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json"), + new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json"), + new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json") + ); + + @Override + protected List getLifecycleConfigs() { + return LIFECYCLE_POLICY_CONFIGS; + } + + @Override + protected List getLifecyclePolicies() { + if (stackTemplateEnabled) { + return lifecyclePolicies; + } else { + return Collections.emptyList(); + } + } + + private static final Map COMPONENT_TEMPLATE_CONFIGS; + + static { + final Map componentTemplates = new HashMap<>(); + for (IndexTemplateConfig config : List.of( + new IndexTemplateConfig( + DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME, + "/data-streams@mappings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME, + "/logs@mappings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME, + "/ecs@mappings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME, + "/logs@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, + "/metrics@mappings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME, + "/metrics@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME, + "/metrics@tsdb-settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, + "/synthetics@mappings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, + "/synthetics@settings.json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + )) { + try { + componentTemplates.put( + config.getTemplateName(), + ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } + COMPONENT_TEMPLATE_CONFIGS = Map.copyOf(componentTemplates); + } + + @Override + protected Map getComponentTemplateConfigs() { + if (stackTemplateEnabled) { + return COMPONENT_TEMPLATE_CONFIGS; + } else { + return Map.of(); + } + } + + @Override + protected Map getComposableTemplateConfigs() { + return Map.of(); + } + + private static final List INGEST_PIPELINE_CONFIGS = List.of( + new IngestPipelineConfig("logs@json-message", "/logs@json-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), + new IngestPipelineConfig("logs-default-pipeline", "/logs@default-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE) + ); + + @Override + protected List getIngestPipelines() { + return INGEST_PIPELINE_CONFIGS; + } + + @Override + protected String getOrigin() { + return ClientHelper.STACK_ORIGIN; + } + + 
@Override + protected boolean requiresMasterNode() { + // Stack templates use the composable index template and component APIs, + // these APIs aren't supported in 7.7 and earlier, and in mixed cluster + // environments this can cause a lot of ActionNotFoundTransportException + // errors in the logs during rolling upgrades. If these templates + // are only installed via the elected master node then the APIs are always + // there and the ActionNotFoundTransportException errors are then prevented. + return true; + } + + @Override + protected boolean isClusterReady(ClusterChangedEvent event) { + // Ensure the current version of the components is installed only once all nodes are updated to 8.9.0. + // This is necessary to prevent an error caused by the usage of the ignore_missing_pipeline property + // in the pipeline processor, which was introduced only in 8.9.0. + Version minNodeVersion = event.state().nodes().getMinNodeVersion(); + return minNodeVersion.onOrAfter(MIN_NODE_VERSION); + } +} diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java index de0858e59900c..1fac8a28aa5da 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackPlugin.java @@ -6,29 +6,14 @@ */ package org.elasticsearch.xpack.stack; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; -import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.function.Supplier; public class StackPlugin extends Plugin implements ActionPlugin { private final Settings settings; @@ -43,24 +28,23 @@ public List> getSettings() { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver indexNameExpressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { - StackTemplateRegistry templateRegistry = new StackTemplateRegistry(settings, clusterService, threadPool, client, xContentRegistry); - templateRegistry.initialize(); - return Collections.singleton(templateRegistry); + public Collection createComponents(PluginServices services) {
LegacyStackTemplateRegistry legacyStackTemplateRegistry = new LegacyStackTemplateRegistry( + settings, + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() + ); + legacyStackTemplateRegistry.initialize(); + StackTemplateRegistry stackTemplateRegistry = new StackTemplateRegistry( + settings, + services.clusterService(), + services.threadPool(), + services.client(), + services.xContentRegistry() + ); + stackTemplateRegistry.initialize(); + return List.of(legacyStackTemplateRegistry, stackTemplateRegistry); } } diff --git a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java index 9b2d7439d0756..f81697982c803 100644 --- a/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java +++ b/x-pack/plugin/stack/src/main/java/org/elasticsearch/xpack/stack/StackTemplateRegistry.java @@ -29,7 +29,6 @@ import org.elasticsearch.xpack.core.template.LifecyclePolicyConfig; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -56,43 +55,43 @@ public class StackTemplateRegistry extends IndexTemplateRegistry { private volatile boolean stackTemplateEnabled; // General mappings conventions for any data that ends up in a data stream - public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams-mappings"; + public static final String DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "data-streams@mappings"; // ECS dynamic mappings - public static final String ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME = "ecs@dynamic_templates"; + public static final String ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME = "ecs@mappings"; ////////////////////////////////////////////////////////// // Built in ILM policies for users to use ////////////////////////////////////////////////////////// - public static final String ILM_7_DAYS_POLICY_NAME = "7-days-default"; - public static final String ILM_30_DAYS_POLICY_NAME = "30-days-default"; - public static final String ILM_90_DAYS_POLICY_NAME = "90-days-default"; - public static final String ILM_180_DAYS_POLICY_NAME = "180-days-default"; - public static final String ILM_365_DAYS_POLICY_NAME = "365-days-default"; + public static final String ILM_7_DAYS_POLICY_NAME = "7-days@lifecycle"; + public static final String ILM_30_DAYS_POLICY_NAME = "30-days@lifecycle"; + public static final String ILM_90_DAYS_POLICY_NAME = "90-days@lifecycle"; + public static final String ILM_180_DAYS_POLICY_NAME = "180-days@lifecycle"; + public static final String ILM_365_DAYS_POLICY_NAME = "365-days@lifecycle"; ////////////////////////////////////////////////////////// // Logs components (for matching logs-*-* indices) ////////////////////////////////////////////////////////// - public static final String LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "logs-mappings"; - public static final String LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME = "logs-settings"; - public static final String LOGS_ILM_POLICY_NAME = "logs"; + public static final String LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "logs@mappings"; + public static final String LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME = "logs@settings"; + public static final String LOGS_ILM_POLICY_NAME = "logs@lifecycle"; public static final String LOGS_INDEX_TEMPLATE_NAME = "logs"; ////////////////////////////////////////////////////////// // Metrics components (for 
matching metric-*-* indices) ////////////////////////////////////////////////////////// - public static final String METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "metrics-mappings"; - public static final String METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics-settings"; - public static final String METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics-tsdb-settings"; - public static final String METRICS_ILM_POLICY_NAME = "metrics"; + public static final String METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "metrics@mappings"; + public static final String METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics@settings"; + public static final String METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME = "metrics@tsdb-settings"; + public static final String METRICS_ILM_POLICY_NAME = "metrics@lifecycle"; public static final String METRICS_INDEX_TEMPLATE_NAME = "metrics"; ////////////////////////////////////////////////////////// // Synthetics components (for matching synthetics-*-* indices) ////////////////////////////////////////////////////////// - public static final String SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "synthetics-mappings"; - public static final String SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "synthetics-settings"; - public static final String SYNTHETICS_ILM_POLICY_NAME = "synthetics"; + public static final String SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME = "synthetics@mappings"; + public static final String SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME = "synthetics@settings"; + public static final String SYNTHETICS_ILM_POLICY_NAME = "synthetics@lifecycle"; public static final String SYNTHETICS_INDEX_TEMPLATE_NAME = "synthetics"; /////////////////////////////////// @@ -132,14 +131,14 @@ private void updateEnabledSetting(boolean newValue) { } private static final List LIFECYCLE_POLICY_CONFIGS = List.of( - new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs-policy.json"), - new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics-policy.json"), - new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics-policy.json"), - new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/" + ILM_7_DAYS_POLICY_NAME + ".json"), - new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/" + ILM_30_DAYS_POLICY_NAME + ".json"), - new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/" + ILM_90_DAYS_POLICY_NAME + ".json"), - new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/" + ILM_180_DAYS_POLICY_NAME + ".json"), - new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/" + ILM_365_DAYS_POLICY_NAME + ".json") + new LifecyclePolicyConfig(LOGS_ILM_POLICY_NAME, "/logs@lifecycle.json"), + new LifecyclePolicyConfig(METRICS_ILM_POLICY_NAME, "/metrics@lifecycle.json"), + new LifecyclePolicyConfig(SYNTHETICS_ILM_POLICY_NAME, "/synthetics@lifecycle.json"), + new LifecyclePolicyConfig(ILM_7_DAYS_POLICY_NAME, "/7-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_30_DAYS_POLICY_NAME, "/30-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_90_DAYS_POLICY_NAME, "/90-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_180_DAYS_POLICY_NAME, "/180-days@lifecycle.json"), + new LifecyclePolicyConfig(ILM_365_DAYS_POLICY_NAME, "/365-days@lifecycle.json") ); @Override @@ -149,11 +148,7 @@ protected List getLifecycleConfigs() { @Override protected List getLifecyclePolicies() { - if (stackTemplateEnabled) { - return lifecyclePolicies; - } else { - return Collections.emptyList(); - } + return lifecyclePolicies; } private static final Map COMPONENT_TEMPLATE_CONFIGS; @@ -163,55 +158,55 @@ protected 
List getLifecyclePolicies() { for (IndexTemplateConfig config : List.of( new IndexTemplateConfig( DATA_STREAMS_MAPPINGS_COMPONENT_TEMPLATE_NAME, - "/data-streams-mappings.json", + "/data-streams@mappings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( LOGS_MAPPINGS_COMPONENT_TEMPLATE_NAME, - "/logs-mappings.json", + "/logs@mappings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( ECS_DYNAMIC_MAPPINGS_COMPONENT_TEMPLATE_NAME, - "/ecs-dynamic-mappings.json", + "/ecs@mappings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( LOGS_SETTINGS_COMPONENT_TEMPLATE_NAME, - "/logs-settings.json", + "/logs@settings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( METRICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, - "/metrics-mappings.json", + "/metrics@mappings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( METRICS_SETTINGS_COMPONENT_TEMPLATE_NAME, - "/metrics-settings.json", + "/metrics@settings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( METRICS_TSDB_SETTINGS_COMPONENT_TEMPLATE_NAME, - "/metrics-tsdb-settings.json", + "/metrics@tsdb-settings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( SYNTHETICS_MAPPINGS_COMPONENT_TEMPLATE_NAME, - "/synthetics-mappings.json", + "/synthetics@mappings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ), new IndexTemplateConfig( SYNTHETICS_SETTINGS_COMPONENT_TEMPLATE_NAME, - "/synthetics-settings.json", + "/synthetics@settings.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ) @@ -230,20 +225,16 @@ protected List getLifecyclePolicies() { @Override protected Map getComponentTemplateConfigs() { - if (stackTemplateEnabled) { - return COMPONENT_TEMPLATE_CONFIGS; - } else { - return Map.of(); - } + return COMPONENT_TEMPLATE_CONFIGS; } private static final Map COMPOSABLE_INDEX_TEMPLATE_CONFIGS = parseComposableTemplates( - new IndexTemplateConfig(LOGS_INDEX_TEMPLATE_NAME, "/logs-template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), - new IndexTemplateConfig(METRICS_INDEX_TEMPLATE_NAME, "/metrics-template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), - new IndexTemplateConfig(SYNTHETICS_INDEX_TEMPLATE_NAME, "/synthetics-template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), + new IndexTemplateConfig(LOGS_INDEX_TEMPLATE_NAME, "/logs@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), + new IndexTemplateConfig(METRICS_INDEX_TEMPLATE_NAME, "/metrics@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), + new IndexTemplateConfig(SYNTHETICS_INDEX_TEMPLATE_NAME, "/synthetics@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), new IndexTemplateConfig( KIBANA_REPORTING_INDEX_TEMPLATE_NAME, - "/kibana-reporting-template.json", + "/kibana-reporting@template.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE ) @@ -259,14 +250,8 @@ protected Map getComposableTemplateConfigs() { } private static final List INGEST_PIPELINE_CONFIGS = List.of( - new IngestPipelineConfig("logs@json-message", "/logs-json-message-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), - new IngestPipelineConfig( - "logs-default-pipeline", - "/logs-default-pipeline.json", - REGISTRY_VERSION, - TEMPLATE_VERSION_VARIABLE, - Collections.singletonList("logs@json-message") - ) + new IngestPipelineConfig("logs@json-pipeline", "/logs@json-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE), + new IngestPipelineConfig("logs@default-pipeline", 
"/logs@default-pipeline.json", REGISTRY_VERSION, TEMPLATE_VERSION_VARIABLE) ); @Override diff --git a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java index 4128e6a4af7ec..8e0cbc3f82f35 100644 --- a/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java +++ b/x-pack/plugin/stack/src/test/java/org/elasticsearch/xpack/stack/StackTemplateRegistryTests.java @@ -63,10 +63,12 @@ import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; @@ -92,7 +94,7 @@ public void tearDown() throws Exception { threadPool.shutdownNow(); } - public void testDisabledDoesNotAddTemplates() { + public void testDisabledDoesNotAddIndexTemplates() { Settings settings = Settings.builder().put(StackTemplateRegistry.STACK_TEMPLATES_ENABLED.getKey(), false).build(); StackTemplateRegistry disabledRegistry = new StackTemplateRegistry( settings, @@ -101,9 +103,34 @@ public void testDisabledDoesNotAddTemplates() { client, NamedXContentRegistry.EMPTY ); - assertThat(disabledRegistry.getComponentTemplateConfigs(), anEmptyMap()); assertThat(disabledRegistry.getComposableTemplateConfigs(), anEmptyMap()); - assertThat(disabledRegistry.getLifecyclePolicies(), hasSize(0)); + } + + public void testDisabledStillAddsComponentTemplatesAndIlmPolicies() { + Settings settings = Settings.builder().put(StackTemplateRegistry.STACK_TEMPLATES_ENABLED.getKey(), false).build(); + StackTemplateRegistry disabledRegistry = new StackTemplateRegistry( + settings, + clusterService, + threadPool, + client, + NamedXContentRegistry.EMPTY + ); + assertThat(disabledRegistry.getComponentTemplateConfigs(), not(anEmptyMap())); + assertThat( + disabledRegistry.getComponentTemplateConfigs() + .keySet() + .stream() + // We have a naming convention that internal component templates contain `@`. See also put-component-template.asciidoc. + .filter(t -> t.contains("@") == false) + .collect(Collectors.toSet()), + empty() + ); + assertThat(disabledRegistry.getLifecyclePolicies(), not(empty())); + assertThat( + // We have a naming convention that internal ILM policies contain `@`. See also put-lifecycle.asciidoc. 
+ disabledRegistry.getLifecyclePolicies().stream().filter(p -> p.getName().contains("@") == false).collect(Collectors.toSet()), + empty() + ); } public void testThatNonExistingTemplatesAreAddedImmediately() throws Exception { @@ -356,7 +383,7 @@ public void testMissingNonRequiredTemplates() throws Exception { assertThat(putComposableTemplateRequest.name(), equalTo("syslog")); ComposableIndexTemplate composableIndexTemplate = putComposableTemplateRequest.indexTemplate(); assertThat(composableIndexTemplate.composedOf(), hasSize(2)); - assertThat(composableIndexTemplate.composedOf().get(0), equalTo("logs-settings")); + assertThat(composableIndexTemplate.composedOf().get(0), equalTo("logs@settings")); assertThat(composableIndexTemplate.composedOf().get(1), equalTo("syslog@custom")); assertThat(composableIndexTemplate.getIgnoreMissingComponentTemplates(), hasSize(1)); assertThat(composableIndexTemplate.getIgnoreMissingComponentTemplates().get(0), equalTo("syslog@custom")); diff --git a/x-pack/plugin/stack/src/test/resources/non-required-template.json b/x-pack/plugin/stack/src/test/resources/non-required-template.json index bb9731c96a765..47391a5b1d4fc 100644 --- a/x-pack/plugin/stack/src/test/resources/non-required-template.json +++ b/x-pack/plugin/stack/src/test/resources/non-required-template.json @@ -3,7 +3,7 @@ "priority": 100, "data_stream": {}, "composed_of": [ - "logs-settings", + "logs@settings", "syslog@custom" ], "ignore_missing_component_templates": ["syslog@custom"], diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index 7c0eaf1450567..ef34db62e5e03 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -1,6 +1,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -43,7 +44,7 @@ testClusters.register('mixed-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'cluster.remote.my_remote_cluster.seeds', { remoteCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE setting 'cluster.remote.connections_per_cluster', "1" user username: "test_user", password: "x-pack-test-password" diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java index 0f1af34ed5ee2..4321306870bd1 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformGetAndGetStatsIT.java @@ -89,19 +89,19 @@ public void testGetAndGetStats() throws Exception { String authHeader = randomFrom(BASIC_AUTH_VALUE_TRANSFORM_USER, BASIC_AUTH_VALUE_TRANSFORM_ADMIN); // Check all the different ways to retrieve transform stats - Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "_stats", 
authHeader); + Request getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "_stats", authHeader, randomTimeout()); Map stats = entityAsMap(client().performRequest(getRequest)); assertEquals(3, XContentMapValues.extractValue("count", stats)); - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "_all/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "_all/_stats", authHeader, randomTimeout()); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(3, XContentMapValues.extractValue("count", stats)); - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "*/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "*/_stats", authHeader, randomTimeout()); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(3, XContentMapValues.extractValue("count", stats)); - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "pivot_1,pivot_2/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "pivot_1,pivot_2/_stats", authHeader, randomTimeout()); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(2, XContentMapValues.extractValue("count", stats)); - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "pivot_*/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "pivot_*/_stats", authHeader, randomTimeout()); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(3, XContentMapValues.extractValue("count", stats)); @@ -122,7 +122,7 @@ public void testGetAndGetStats() throws Exception { } // only pivot_1 - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "pivot_1/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout("GET", getTransformEndpoint() + "pivot_1/_stats", authHeader, randomTimeout()); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); @@ -133,7 +133,12 @@ public void testGetAndGetStats() throws Exception { assertEquals(1, XContentMapValues.extractValue("checkpointing.last.checkpoint", transformsStats.get(0))); // only continuous - getRequest = createRequestWithAuth("GET", getTransformEndpoint() + "pivot_continuous/_stats", authHeader); + getRequest = createRequestWithAuthAndTimeout( + "GET", + getTransformEndpoint() + "pivot_continuous/_stats", + authHeader, + randomTimeout() + ); stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); @@ -300,7 +305,7 @@ private List> verifyGetStatsResponse(String path, int expect // Alternate testing between admin and lowly user, as both should be able to get the configs and stats String authHeader = randomFrom(BASIC_AUTH_VALUE_TRANSFORM_USER, BASIC_AUTH_VALUE_TRANSFORM_ADMIN); - Request request = createRequestWithAuth("GET", path, authHeader); + Request request = createRequestWithAuthAndTimeout("GET", path, authHeader, randomTimeout()); request.setOptions(RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE)); Response response = client().performRequest(request); Map stats = entityAsMap(response); @@ -354,7 +359,12 @@ public void testGetProgressStatsWithPivotQuery() throws Exception { // Alternate testing between admin and lowly user, as both should be able to get the configs and stats String authHeader = 
randomFrom(BASIC_AUTH_VALUE_TRANSFORM_USER, BASIC_AUTH_VALUE_TRANSFORM_ADMIN); - Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + transformId + "/_stats", authHeader); + Request getRequest = createRequestWithAuthAndTimeout( + "GET", + getTransformEndpoint() + transformId + "/_stats", + authHeader, + randomTimeout() + ); Map stats = entityAsMap(client().performRequest(getRequest)); assertEquals(1, XContentMapValues.extractValue("count", stats)); List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); @@ -420,7 +430,12 @@ public void testGetStatsWithContinuous() throws Exception { assertThat(createTransformResponse.get("acknowledged"), equalTo(Boolean.TRUE)); startAndWaitForContinuousTransform(transformId, transformDest, null); - Request getRequest = createRequestWithAuth("GET", getTransformEndpoint() + transformId + "/_stats", null); + Request getRequest = createRequestWithAuthAndTimeout( + "GET", + getTransformEndpoint() + transformId + "/_stats", + null, + randomTimeout() + ); Map stats = entityAsMap(client().performRequest(getRequest)); List> transformsStats = (List>) XContentMapValues.extractValue("transforms", stats); assertEquals(1, transformsStats.size()); @@ -572,4 +587,16 @@ private static String transformConfig() { } """; } + + private Request createRequestWithAuthAndTimeout(String method, String endpoint, String authHeader, String timeout) { + Request request = createRequestWithAuth(method, endpoint, authHeader); + if (timeout != null) { + request.addParameter("timeout", timeout); + } + return request; + } + + private static String randomTimeout() { + return randomFrom((String) null, "5s", "30s", "1m"); + } } diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java index 49adb7c194b22..9b6b67e76c01c 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; @@ -24,6 +25,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.cache.query.QueryCacheStats; import org.elasticsearch.index.cache.request.RequestCacheStats; @@ -43,6 +45,7 @@ import org.elasticsearch.index.warmer.WarmerStats; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.search.suggest.completion.CompletionStats; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.client.NoOpClient; import 
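The `createRequestWithAuthAndTimeout` helper above attaches the `timeout` query parameter only when the randomized value is non-null, so the no-timeout code path keeps getting exercised. A rough JDK-only sketch of the same pattern (endpoint and helper names are hypothetical):

```java
import java.util.concurrent.ThreadLocalRandom;

public class OptionalTimeoutParam {
    // Mirrors randomTimeout(): null means "send no timeout parameter at all".
    static String randomTimeout() {
        String[] choices = { null, "5s", "30s", "1m" };
        return choices[ThreadLocalRandom.current().nextInt(choices.length)];
    }

    static String withOptionalTimeout(String endpoint, String timeout) {
        // Only append the parameter when a value was actually chosen.
        return timeout == null ? endpoint : endpoint + "?timeout=" + timeout;
    }

    public static void main(String[] args) {
        System.out.println(withOptionalTimeout("/_transform/_stats", randomTimeout()));
    }
}
```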
org.elasticsearch.transport.ActionNotFoundTransportException; @@ -405,7 +408,8 @@ private static void getCheckpoint( (l, infoBuilder) -> l.onResponse(infoBuilder.build()) ); transformCheckpointService.getCheckpointingInfo( - mockClientForCheckpointing, + new ParentTaskAssigningClient(mockClientForCheckpointing, new TaskId("dummy-node:123456")), + TimeValue.timeValueSeconds(5), transformId, lastCheckpointNumber, nextCheckpointPosition, diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java index 2c69b0b2c8ca5..bb159856b965d 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java @@ -7,18 +7,28 @@ package org.elasticsearch.xpack.transform.checkpoint; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; import java.util.Arrays; import java.util.Comparator; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; + /** * Test suite for checkpointing using transform getcheckpoint API */ @@ -35,7 +45,8 @@ public void testGetCheckpoint() throws Exception { final GetCheckpointAction.Request request = new GetCheckpointAction.Request( new String[] { indexNamePrefix + "*" }, - IndicesOptions.LENIENT_EXPAND_OPEN + IndicesOptions.LENIENT_EXPAND_OPEN, + TimeValue.timeValueSeconds(5) ); final GetCheckpointAction.Response response = client().execute(GetCheckpointAction.INSTANCE, request).get(); @@ -88,4 +99,40 @@ public void testGetCheckpoint() throws Exception { ); } + public void testGetCheckpointTimeoutExceeded() throws Exception { + final String indexNamePrefix = "test_index-"; + final int indices = 100; + final int shards = 5; + + for (int i = 0; i < indices; ++i) { + indicesAdmin().prepareCreate(indexNamePrefix + i).setSettings(indexSettings(shards, 0)).get(); + } + + final GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { indexNamePrefix + "*" }, + IndicesOptions.LENIENT_EXPAND_OPEN, + TimeValue.ZERO + ); + + CountDownLatch latch = new CountDownLatch(1); + SetOnce finalException = new SetOnce<>(); + client().execute(GetCheckpointAction.INSTANCE, request, ActionListener.wrap(r -> latch.countDown(), e -> { + finalException.set(e); + latch.countDown(); + })); + latch.await(10, TimeUnit.SECONDS); + + Exception e = finalException.get(); + if (e != null) { + assertThat(e, is(instanceOf(ElasticsearchTimeoutException.class))); + assertThat( + "Message was: " + e.getMessage(), + 
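The timeout-exceeded test above drives the action asynchronously and captures a possible failure through a latch, tolerating the success path because the deadline is checked against the real clock. A stripped-down sketch of that test pattern using only JDK types (the failing action is simulated):

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

public class AsyncFailureCapture {
    public static void main(String[] args) throws InterruptedException {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<Exception> finalException = new AtomicReference<>();

        // Simulated async action that may fail; the callback counts the latch down either way.
        CompletableFuture.runAsync(() -> { throw new RuntimeException("timed out after [0ms]"); })
            .whenComplete((r, e) -> {
                if (e != null) {
                    finalException.set(new Exception(e.getCause()));
                }
                latch.countDown();
            });

        latch.await(10, TimeUnit.SECONDS);
        Exception e = finalException.get();
        if (e != null) {
            System.out.println("captured: " + e.getCause().getMessage());
        } else {
            // As in the test: the timeout does not reliably fire, so success is tolerated.
            System.out.println("action completed before the deadline check");
        }
    }
}
```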
e.getMessage(), + startsWith("Transform checkpointing timed out on node [node_s_0] after [0ms]") + ); + } else { + // Due to system clock usage, the timeout does not always occur where it should. + // We cannot mock the clock so we just have to live with it. + } + } } diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java index 521a1deafe797..1411576e61d58 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; @@ -49,6 +50,7 @@ import org.junit.After; import org.junit.Before; +import java.time.Clock; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -97,12 +99,15 @@ public void setUp() throws Exception { @Override protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) { if (action.equals(GetCheckpointNodeAction.NAME)) { - getCheckpointNodeAction.execute(null, (GetCheckpointNodeAction.Request) request, ActionListener.wrap(r -> { - this.handleResponse(requestId, r); - }, e -> { - this.handleError(requestId, new TransportException(e.getMessage(), e)); - - })); + GetCheckpointNodeAction.Request getCheckpointNodeActionRequest = (GetCheckpointNodeAction.Request) request; + Task task = getCheckpointNodeActionRequest.createTask(123, "type", "action", null, Map.of()); + getCheckpointNodeAction.execute( + task, + getCheckpointNodeActionRequest, + ActionListener.wrap(r -> { this.handleResponse(requestId, r); }, e -> { + this.handleError(requestId, new TransportException(e.getMessage(), e)); + }) + ); } } }; @@ -149,19 +154,23 @@ public void tearDown() throws Exception { } public void testEmptyCheckpoint() throws InterruptedException { - GetCheckpointAction.Request request = new GetCheckpointAction.Request(Strings.EMPTY_ARRAY, IndicesOptions.LENIENT_EXPAND_OPEN); + GetCheckpointAction.Request request = new GetCheckpointAction.Request( + Strings.EMPTY_ARRAY, + IndicesOptions.LENIENT_EXPAND_OPEN, + TimeValue.timeValueSeconds(5) + ); assertCheckpointAction(request, response -> { assertNotNull(response.getCheckpoints()); Map checkpoints = response.getCheckpoints(); assertTrue(checkpoints.isEmpty()); - }); } public void testSingleIndexRequest() throws InterruptedException { GetCheckpointAction.Request request = new GetCheckpointAction.Request( new String[] { indexNamePattern + "0" }, - IndicesOptions.LENIENT_EXPAND_OPEN + IndicesOptions.LENIENT_EXPAND_OPEN, + TimeValue.timeValueSeconds(5) ); assertCheckpointAction(request, response -> { @@ -173,12 +182,15 @@ public void testSingleIndexRequest() throws InterruptedException { assertEquals(42 + i, checkpoints.get(indexNamePattern + "0")[i]); } assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); - }); } public void testMultiIndexRequest() throws InterruptedException { - 
GetCheckpointAction.Request request = new GetCheckpointAction.Request(testIndices, IndicesOptions.LENIENT_EXPAND_OPEN); + GetCheckpointAction.Request request = new GetCheckpointAction.Request( + testIndices, + IndicesOptions.LENIENT_EXPAND_OPEN, + TimeValue.timeValueSeconds(5) + ); assertCheckpointAction(request, response -> { assertNotNull(response.getCheckpoints()); Map checkpoints = response.getCheckpoints(); @@ -203,7 +215,6 @@ class TestTransportGetCheckpointAction extends TransportGetCheckpointAction { protected void doExecute(Task task, Request request, ActionListener listener) { resolveIndicesAndGetCheckpoint(task, request, listener, clusterStateWithIndex); } - } class TestTransportGetCheckpointNodeAction extends TransportGetCheckpointNodeAction { @@ -239,7 +250,7 @@ protected void doExecute( ActionListener listener ) { ++calls; - getGlobalCheckpoints(mockIndicesService, request.getShards(), listener); + getGlobalCheckpoints(mockIndicesService, task, request.getShards(), request.getTimeout(), Clock.systemUTC(), listener); } public int getCalls() { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 81a719e24f633..61cc0e2c072ad 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -24,9 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -34,25 +32,17 @@ import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.indices.AssociatedIndexDescriptor; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.NamedXContentRegistry.Entry; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.SetResetModeActionRequest; @@ -229,27 +219,15 @@ public List getRestHandlers( } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - 
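Threading a `java.time.Clock` through `getGlobalCheckpoints`, with production callers supplying `Clock.systemUTC()`, is what makes the deadline logic testable. A small sketch of the injectable-clock pattern under that assumption:

```java
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneOffset;

public class DeadlineWithClock {
    // The deadline check takes the clock as a parameter instead of reading system time directly.
    static boolean deadlineExceeded(long startMillis, long timeoutMillis, Clock clock) {
        Instant now = clock.instant();
        return startMillis + timeoutMillis < now.toEpochMilli();
    }

    public static void main(String[] args) {
        // Production: the real clock.
        System.out.println(deadlineExceeded(System.currentTimeMillis(), 5_000, Clock.systemUTC())); // false

        // Test: a fixed clock makes the outcome deterministic.
        Clock fixed = Clock.fixed(Instant.ofEpochMilli(100_000), ZoneOffset.UTC);
        System.out.println(deadlineExceeded(0, 50_000, fixed)); // true
    }
}
```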
ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection createComponents(PluginServices services) { + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + TransformConfigManager configManager = new IndexBasedTransformConfigManager( clusterService, - expressionResolver, + services.indexNameExpressionResolver(), client, - xContentRegistry + services.xContentRegistry() ); TransformAuditor auditor = new TransformAuditor( client, @@ -265,12 +243,12 @@ public Collection createComponents( configManager, auditor ); - TransformScheduler scheduler = new TransformScheduler(clock, threadPool, settings); + TransformScheduler scheduler = new TransformScheduler(clock, services.threadPool(), settings); scheduler.start(); transformServices.set(new TransformServices(configManager, checkpointService, auditor, scheduler)); - return Arrays.asList( + return List.of( transformServices.get(), new TransformClusterStateListener(clusterService, client), new TransformExtensionHolder(getTransformExtension()) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java index 8167305ef0cee..5acc2d4541559 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java @@ -26,8 +26,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.TransportRequestOptions; @@ -38,6 +40,8 @@ import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; +import java.time.Clock; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -87,7 +91,8 @@ protected void resolveIndicesAndGetCheckpoint(Task task, Request request, Action return; } - new AsyncGetCheckpointsFromNodesAction(state, task, nodesAndShards, new OriginalIndices(request), listener).start(); + new AsyncGetCheckpointsFromNodesAction(state, task, nodesAndShards, new OriginalIndices(request), request.getTimeout(), listener) + .start(); } private static Map> resolveIndicesToPrimaryShards(ClusterState state, String[] concreteIndices) { @@ -125,6 +130,7 @@ protected class AsyncGetCheckpointsFromNodesAction { private final ActionListener listener; private final Map> nodesAndShards; private final OriginalIndices originalIndices; + private final TimeValue timeout; private final DiscoveryNodes nodes; 
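The `createComponents(PluginServices services)` signature above replaces a long positional parameter list with a single services holder, so plugins pull out only what they use. A toy sketch of that refactor shape (all type names here are stand-ins, not the real `PluginServices` API):

```java
import java.util.List;

public class ParameterObjectRefactor {
    // Stand-ins for the many services a plugin might need.
    record ClusterService(String name) {}
    record ThreadPool(String name) {}

    // One holder object replaces the long positional parameter list.
    record PluginServices(ClusterService clusterService, ThreadPool threadPool) {}

    static List<Object> createComponents(PluginServices services) {
        // Callers are no longer coupled to the order of a dozen arguments;
        // new services can be added to the holder without breaking every plugin.
        return List.of(services.clusterService(), services.threadPool());
    }

    public static void main(String[] args) {
        PluginServices services = new PluginServices(new ClusterService("cluster"), new ThreadPool("pool"));
        System.out.println(createComponents(services));
    }
}
```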
private final String localNodeId; @@ -133,12 +139,14 @@ protected AsyncGetCheckpointsFromNodesAction( Task task, Map> nodesAndShards, OriginalIndices originalIndices, + TimeValue timeout, ActionListener listener ) { this.task = task; this.listener = listener; this.nodesAndShards = nodesAndShards; this.originalIndices = originalIndices; + this.timeout = timeout; this.nodes = clusterState.nodes(); this.localNodeId = clusterService.localNode().getId(); } @@ -146,37 +154,32 @@ protected AsyncGetCheckpointsFromNodesAction( public void start() { GroupedActionListener groupedListener = new GroupedActionListener<>( nodesAndShards.size(), - ActionListener.wrap(responses -> { - // the final list should be ordered by key - Map checkpointsByIndexReduced = new TreeMap<>(); - - // merge the node responses - for (GetCheckpointNodeAction.Response response : responses) { - response.getCheckpoints().forEach((index, checkpoint) -> { - if (checkpointsByIndexReduced.containsKey(index)) { - long[] shardCheckpoints = checkpointsByIndexReduced.get(index); - for (int i = 0; i < checkpoint.length; ++i) { - shardCheckpoints[i] = Math.max(shardCheckpoints[i], checkpoint[i]); - } - } else { - checkpointsByIndexReduced.put(index, checkpoint); - } - }); - } - - listener.onResponse(new Response(checkpointsByIndexReduced)); - }, listener::onFailure) + ActionListener.wrap(responses -> listener.onResponse(mergeNodeResponses(responses)), listener::onFailure) ); for (Entry> oneNodeAndItsShards : nodesAndShards.entrySet()) { + if (task instanceof CancellableTask) { + // There is no point continuing this work if the task has been cancelled. + if (((CancellableTask) task).notifyIfCancelled(listener)) { + return; + } + } if (localNodeId.equals(oneNodeAndItsShards.getKey())) { - TransportGetCheckpointNodeAction.getGlobalCheckpoints(indicesService, oneNodeAndItsShards.getValue(), groupedListener); + TransportGetCheckpointNodeAction.getGlobalCheckpoints( + indicesService, + task, + oneNodeAndItsShards.getValue(), + timeout, + Clock.systemUTC(), + groupedListener + ); continue; } GetCheckpointNodeAction.Request nodeCheckpointsRequest = new GetCheckpointNodeAction.Request( oneNodeAndItsShards.getValue(), - originalIndices + originalIndices, + timeout ); DiscoveryNode node = nodes.get(oneNodeAndItsShards.getKey()); @@ -207,5 +210,26 @@ public void start() { ); } } + + private static Response mergeNodeResponses(Collection responses) { + // the final list should be ordered by key + Map checkpointsByIndexReduced = new TreeMap<>(); + + // merge the node responses + for (GetCheckpointNodeAction.Response response : responses) { + response.getCheckpoints().forEach((index, checkpoint) -> { + if (checkpointsByIndexReduced.containsKey(index)) { + long[] shardCheckpoints = checkpointsByIndexReduced.get(index); + for (int i = 0; i < checkpoint.length; ++i) { + shardCheckpoints[i] = Math.max(shardCheckpoints[i], checkpoint[i]); + } + } else { + checkpointsByIndexReduced.put(index, checkpoint); + } + }); + } + + return new Response(checkpointsByIndexReduced); + } } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java index 798191577cdb2..481fe40a764a6 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java +++ 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java @@ -6,22 +6,27 @@ */ package org.elasticsearch.xpack.transform.action; +import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request; import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Response; +import java.time.Clock; +import java.time.Instant; import java.util.Arrays; import java.util.HashMap; import java.util.Map; @@ -43,12 +48,41 @@ public TransportGetCheckpointNodeAction( @Override protected void doExecute(Task task, Request request, ActionListener listener) { - getGlobalCheckpoints(indicesService, request.getShards(), listener); + getGlobalCheckpoints(indicesService, task, request.getShards(), request.getTimeout(), Clock.systemUTC(), listener); } - protected static void getGlobalCheckpoints(IndicesService indicesService, Set shards, ActionListener listener) { + protected static void getGlobalCheckpoints( + IndicesService indicesService, + Task task, + Set shards, + TimeValue timeout, + Clock clock, + ActionListener listener + ) { Map checkpointsByIndexOfThisNode = new HashMap<>(); + int numProcessedShards = 0; for (ShardId shardId : shards) { + if (task instanceof CancellableTask) { + // There is no point continuing this work if the task has been cancelled. + if (((CancellableTask) task).notifyIfCancelled(listener)) { + return; + } + } + if (timeout != null) { + Instant now = clock.instant(); + if (task.getStartTime() + timeout.millis() < now.toEpochMilli()) { + listener.onFailure( + new ElasticsearchTimeoutException( + "Transform checkpointing timed out on node [{}] after [{}] having processed [{}] of [{}] shards", + indicesService.clusterService().getNodeName(), + timeout.getStringRep(), + numProcessedShards, + shards.size() + ) + ); + return; + } + } final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); final IndexShard indexShard = indexService.getShard(shardId.id()); @@ -58,6 +92,7 @@ protected static void getGlobalCheckpoints(IndicesService indicesService, Set listener) { - final ClusterState state = clusterService.state(); - TransformNodes.warnIfNoTransformNodes(state); + final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); + final ClusterState clusterState = clusterService.state(); + TransformNodes.warnIfNoTransformNodes(clusterState); // Step 2: Search for all the transform tasks (matching the request) that *do not* have corresponding transform config. 
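`getGlobalCheckpoints` now checks two escape hatches on every shard: task cancellation and a wall-clock budget measured from the task's start time, failing with a message that reports how many shards were processed. A self-contained sketch of that loop (checkpoint reads are simulated):

```java
import java.time.Clock;
import java.util.List;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;

public class BudgetedShardLoop {
    static void processShards(List<String> shards, long taskStartMillis, long timeoutMillis,
                              Clock clock, AtomicBoolean cancelled) throws Exception {
        int numProcessedShards = 0;
        for (String shard : shards) {
            // Escape hatch 1: stop immediately if the task was cancelled.
            if (cancelled.get()) {
                throw new InterruptedException("task cancelled");
            }
            // Escape hatch 2: fail once the budget since task start is spent,
            // reporting progress so the caller can size the timeout better.
            if (taskStartMillis + timeoutMillis < clock.instant().toEpochMilli()) {
                throw new TimeoutException(
                    "timed out after [" + timeoutMillis + "ms] having processed ["
                        + numProcessedShards + "] of [" + shards.size() + "] shards");
            }
            ++numProcessedShards; // simulate reading the shard's global checkpoint
        }
    }

    public static void main(String[] args) throws Exception {
        processShards(List.of("shard-0", "shard-1"), System.currentTimeMillis(), 5_000,
            Clock.systemUTC(), new AtomicBoolean(false));
        System.out.println("all shards processed within budget");
    }
}
```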
ActionListener> searchTransformConfigsListener = ActionListener.wrap(r -> { Set transformConfigIds = r.results().stream().map(TransformConfig::getId).collect(toSet()); Collection> transformTasks = TransformTask.findTransformTasks( request.getId(), - state + clusterState ); List errors = transformTasks.stream() .map(PersistentTasksCustomMetadata.PersistentTask::getId) @@ -88,7 +89,7 @@ protected void doExecute(Task task, Request request, ActionListener li }, listener::onFailure); // Step 1: Search for all the transform configs matching the request. - searchResources(request, new TaskId(clusterService.localNode().getId(), task.getId()), searchTransformConfigsListener); + searchResources(request, parentTaskId, searchTransformConfigsListener); } @Override diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java index 98343d5593dfa..13abc427460be 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetTransformStatsAction.java @@ -115,45 +115,55 @@ protected Response newResponse( } @Override - protected void taskOperation(CancellableTask actionTask, Request request, TransformTask task, ActionListener listener) { + protected void taskOperation( + CancellableTask actionTask, + Request request, + TransformTask transformTask, + ActionListener listener + ) { // Little extra insurance, make sure we only return transforms that aren't cancelled - ClusterState state = clusterService.state(); - String nodeId = state.nodes().getLocalNode().getId(); + ClusterState clusterState = clusterService.state(); + String nodeId = clusterState.nodes().getLocalNode().getId(); + final TaskId parentTaskId = new TaskId(nodeId, actionTask.getId()); - if (task.isCancelled() == false) { - task.getCheckpointingInfo( - transformCheckpointService, - ActionListener.wrap( - checkpointingInfo -> listener.onResponse(new Response(Collections.singletonList(deriveStats(task, checkpointingInfo)))), - e -> { - logger.warn("Failed to retrieve checkpointing info for transform [" + task.getTransformId() + "]", e); - listener.onResponse( - new Response( - Collections.singletonList(deriveStats(task, null)), - 1L, - Collections.emptyList(), - Collections.singletonList(new FailedNodeException(nodeId, "Failed to retrieve checkpointing info", e)) - ) - ); - } - ), - // at this point the transport already spend some time budget in `doExecute`, it is hard to tell what is left: - // recording the time spend would be complex and crosses machine boundaries, that's why we use a heuristic here - TimeValue.timeValueMillis( - (long) ((request.getTimeout() != null - ? 
request.getTimeout().millis() - : AcknowledgedRequest.DEFAULT_ACK_TIMEOUT.millis()) * CHECKPOINT_INFO_TIMEOUT_SHARE) - ) - ); - } else { + if (actionTask.notifyIfCancelled(listener)) { + return; + } + if (transformTask.isCancelled()) { listener.onResponse(new Response(Collections.emptyList())); + return; } + transformTask.getCheckpointingInfo( + transformCheckpointService, + new ParentTaskAssigningClient(client, parentTaskId), + ActionListener.wrap( + checkpointingInfo -> listener.onResponse( + new Response(Collections.singletonList(deriveStats(transformTask, checkpointingInfo))) + ), + e -> { + logger.warn("Failed to retrieve checkpointing info for transform [" + transformTask.getTransformId() + "]", e); + listener.onResponse( + new Response( + Collections.singletonList(deriveStats(transformTask, null)), + 1L, + Collections.emptyList(), + Collections.singletonList(new FailedNodeException(nodeId, "Failed to retrieve checkpointing info", e)) + ) + ); + } + ), + // at this point the transport already spent some time budget in `doExecute`, it is hard to tell what is left: + // recording the time spent would be complex and would cross machine boundaries, that's why we use a heuristic here + TimeValue.timeValueMillis( + (long) ((request.getTimeout() != null ? request.getTimeout().millis() : AcknowledgedRequest.DEFAULT_ACK_TIMEOUT.millis()) + * CHECKPOINT_INFO_TIMEOUT_SHARE) + ) + ); } @Override protected void doExecute(Task task, Request request, ActionListener finalListener) { - TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); - + final TaskId parentTaskId = new TaskId(clusterService.localNode().getId(), task.getId()); + final ClusterState clusterState = clusterService.state(); TransformNodes.warnIfNoTransformNodes(clusterState); @@ -231,16 +241,16 @@ protected void doExecute(Task task, Request request, ActionListener fi private static void setNodeAttributes( TransformStats transformStats, PersistentTasksCustomMetadata persistentTasksCustomMetadata, - ClusterState state + ClusterState clusterState ) { var pTask = persistentTasksCustomMetadata.getTask(transformStats.getId()); if (pTask != null) { - transformStats.setNode(NodeAttributes.fromDiscoveryNode(state.nodes().get(pTask.getExecutorNode()))); + transformStats.setNode(NodeAttributes.fromDiscoveryNode(clusterState.nodes().get(pTask.getExecutorNode()))); } } - static TransformStats deriveStats(TransformTask task, @Nullable TransformCheckpointingInfo checkpointingInfo) { - TransformState transformState = task.getState(); + static TransformStats deriveStats(TransformTask transformTask, @Nullable TransformCheckpointingInfo checkpointingInfo) { + TransformState transformState = transformTask.getState(); TransformStats.State derivedState = TransformStats.State.fromComponents( transformState.getTaskState(), transformState.getIndexerState() @@ -253,13 +263,13 @@ static TransformStats deriveStats(TransformTask task, @Nullable TransformCheckpo reason = Strings.isNullOrEmpty(reason) ? "transform is set to stop at the next checkpoint" : reason; } return new TransformStats( - task.getTransformId(), + transformTask.getTransformId(), derivedState, reason, null, - task.getStats(), + transformTask.getStats(), checkpointingInfo == null ?
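Because part of the request timeout is already spent before the checkpointing-info call and the exact remainder is impractical to measure across machines, the code hands the sub-operation a fixed share of the original budget. A sketch of that heuristic; the value of `CHECKPOINT_INFO_TIMEOUT_SHARE` is assumed here, since this hunk does not show it:

```java
public class TimeoutShareHeuristic {
    // Assumption for illustration only: the real constant's value is not shown in this patch.
    private static final double CHECKPOINT_INFO_TIMEOUT_SHARE = 0.8;
    private static final long DEFAULT_ACK_TIMEOUT_MILLIS = 30_000;

    // Give the sub-operation a fixed fraction of the overall request budget rather than
    // trying to measure precisely how much of it earlier steps already consumed.
    static long checkpointInfoTimeoutMillis(Long requestTimeoutMillis) {
        long total = requestTimeoutMillis != null ? requestTimeoutMillis : DEFAULT_ACK_TIMEOUT_MILLIS;
        return (long) (total * CHECKPOINT_INFO_TIMEOUT_SHARE);
    }

    public static void main(String[] args) {
        System.out.println(checkpointInfoTimeoutMillis(10_000L)); // 8000
        System.out.println(checkpointInfoTimeoutMillis(null));    // 24000
    }
}
```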
TransformCheckpointingInfo.EMPTY : checkpointingInfo, - TransformHealthChecker.checkTransform(task, transformState.getAuthState()) + TransformHealthChecker.checkTransform(transformTask, transformState.getAuthState()) ); } @@ -290,6 +300,7 @@ private void collectStatsForTransformsWithoutTasks( List allStateAndStats = new ArrayList<>(response.getTransformsStats()); addCheckpointingInfoForTransformsWithoutTasks( parentTaskId, + request.getTimeout(), allStateAndStats, statsForTransformsWithoutTasks, transformsWaitingForAssignment, @@ -325,10 +336,12 @@ private void collectStatsForTransformsWithoutTasks( private void populateSingleStoppedTransformStat( TransformStoredDoc transform, TaskId parentTaskId, + TimeValue timeout, ActionListener listener ) { transformCheckpointService.getCheckpointingInfo( new ParentTaskAssigningClient(client, parentTaskId), + timeout, transform.getId(), transform.getTransformState().getCheckpoint(), transform.getTransformState().getPosition(), @@ -342,6 +355,7 @@ private void populateSingleStoppedTransformStat( private void addCheckpointingInfoForTransformsWithoutTasks( TaskId parentTaskId, + TimeValue timeout, List allStateAndStats, List statsForTransformsWithoutTasks, Set transformsWaitingForAssignment, @@ -358,7 +372,7 @@ private void addCheckpointingInfoForTransformsWithoutTasks( AtomicBoolean isExceptionReported = new AtomicBoolean(false); statsForTransformsWithoutTasks.forEach( - stat -> populateSingleStoppedTransformStat(stat, parentTaskId, ActionListener.wrap(checkpointingInfo -> { + stat -> populateSingleStoppedTransformStat(stat, parentTaskId, timeout, ActionListener.wrap(checkpointingInfo -> { synchronized (allStateAndStats) { if (transformsWaitingForAssignment.contains(stat.getId())) { Assignment assignment = TransformNodes.getAssignment(stat.getId(), clusterState); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 37dff75a76f6b..8e0a935ffaa53 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -210,7 +210,7 @@ private void getPreview( SyncConfig syncConfig, ActionListener listener ) { - Client parentTaskAssigningClient = new ParentTaskAssigningClient(client, parentTaskId); + Client parentTaskClient = new ParentTaskAssigningClient(client, parentTaskId); final SetOnce> mappings = new SetOnce<>(); @@ -279,7 +279,7 @@ private void getPreview( builder.endObject(); var pipelineRequest = new SimulatePipelineRequest(BytesReference.bytes(builder), XContentType.JSON); pipelineRequest.setId(pipeline); - parentTaskAssigningClient.execute(SimulatePipelineAction.INSTANCE, pipelineRequest, pipelineResponseActionListener); + parentTaskClient.execute(SimulatePipelineAction.INSTANCE, pipelineRequest, pipelineResponseActionListener); } } }, listener::onFailure); @@ -287,7 +287,7 @@ private void getPreview( ActionListener> deduceMappingsListener = ActionListener.wrap(deducedMappings -> { mappings.set(deducedMappings); function.preview( - parentTaskAssigningClient, + parentTaskClient, timeout, filteredHeaders, source, @@ -297,6 +297,6 @@ private void getPreview( ); }, listener::onFailure); - function.deduceMappings(parentTaskAssigningClient, filteredHeaders, source, 
deduceMappingsListener); + function.deduceMappings(parentTaskClient, filteredHeaders, source, deduceMappingsListener); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointProvider.java index 3aa7116c3aa74..cae0ca07957d0 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/CheckpointProvider.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.transform.checkpoint; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition; @@ -50,6 +51,7 @@ void getCheckpointingInfo( TransformCheckpoint nextCheckpoint, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ); @@ -67,6 +69,7 @@ void getCheckpointingInfo( long lastCheckpointNumber, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java index 86ef29b2af370..aa1332b95fe84 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java @@ -18,10 +18,11 @@ import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Strings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.xpack.core.ClientHelper; @@ -55,10 +56,15 @@ class DefaultCheckpointProvider implements CheckpointProvider { // threshold when to audit concrete index names, above this threshold we only report the number of changes private static final int AUDIT_CONCRETED_SOURCE_INDEX_CHANGES = 10; + // Huge timeout for getting index checkpoints internally. + // It might help to release cluster resources earlier if e.g.: someone configures a transform that ends up checkpointing 100000 + // searchable snapshot indices that all have to be retrieved from blob storage. 
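`DefaultCheckpointProvider` now requires a `ParentTaskAssigningClient`, which stamps the caller's task ID on every outgoing request so the whole checkpoint fan-out is attributable and cancellable as one task tree; note how the remote-cluster client gets re-wrapped with the same parent task below. A minimal sketch of the decorator idea with hypothetical types:

```java
public class ParentTaskPropagation {
    record TaskId(String nodeId, long id) {}

    static class Request {
        TaskId parentTask;
    }

    interface Client {
        void execute(Request request);
    }

    // Decorator that assigns the parent task to every outgoing request,
    // so child actions show up under (and get cancelled with) the caller's task.
    static class ParentTaskClient implements Client {
        private final Client delegate;
        private final TaskId parentTask;

        ParentTaskClient(Client delegate, TaskId parentTask) {
            this.delegate = delegate;
            this.parentTask = parentTask;
        }

        @Override
        public void execute(Request request) {
            request.parentTask = parentTask;
            delegate.execute(request);
        }
    }

    public static void main(String[] args) {
        Client transport = request -> System.out.println("sending with parent task " + request.parentTask);
        Client client = new ParentTaskClient(transport, new TaskId("node-1", 42));
        client.execute(new Request());
    }
}
```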
+ protected static final TimeValue INTERNAL_GET_INDEX_CHECKPOINTS_TIMEOUT = TimeValue.timeValueHours(12); + private static final Logger logger = LogManager.getLogger(DefaultCheckpointProvider.class); protected final Clock clock; - protected final Client client; + protected final ParentTaskAssigningClient client; protected final RemoteClusterResolver remoteClusterResolver; protected final TransformConfigManager transformConfigManager; protected final TransformAuditor transformAuditor; @@ -69,7 +75,7 @@ class DefaultCheckpointProvider implements CheckpointProvider { DefaultCheckpointProvider( final Clock clock, - final Client client, + final ParentTaskAssigningClient client, final RemoteClusterResolver remoteClusterResolver, final TransformConfigManager transformConfigManager, final TransformAuditor transformAuditor, @@ -93,7 +99,7 @@ public void createNextCheckpoint(final TransformCheckpoint lastCheckpoint, final final long timestamp = clock.millis(); final long checkpoint = TransformCheckpoint.isNullOrEmpty(lastCheckpoint) ? 1 : lastCheckpoint.getCheckpoint() + 1; - getIndexCheckpoints(ActionListener.wrap(checkpointsByIndex -> { + getIndexCheckpoints(INTERNAL_GET_INDEX_CHECKPOINTS_TIMEOUT, ActionListener.wrap(checkpointsByIndex -> { reportSourceIndexChanges( TransformCheckpoint.isNullOrEmpty(lastCheckpoint) ? Collections.emptySet() @@ -105,7 +111,7 @@ public void createNextCheckpoint(final TransformCheckpoint lastCheckpoint, final }, listener::onFailure)); } - protected void getIndexCheckpoints(ActionListener> listener) { + protected void getIndexCheckpoints(TimeValue timeout, ActionListener> listener) { try { ResolvedIndices resolvedIndexes = remoteClusterResolver.resolve(transformConfig.getSource().getIndex()); ActionListener> groupedListener = listener; @@ -125,6 +131,7 @@ protected void getIndexCheckpoints(ActionListener> listener) if (resolvedIndexes.getLocalIndices().isEmpty() == false) { getCheckpointsFromOneCluster( client, + timeout, transformConfig.getHeaders(), resolvedIndexes.getLocalIndices().toArray(new String[0]), RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY, @@ -133,9 +140,13 @@ protected void getIndexCheckpoints(ActionListener> listener) } for (Map.Entry> remoteIndex : resolvedIndexes.getRemoteIndicesPerClusterAlias().entrySet()) { - Client remoteClient = client.getRemoteClusterClient(remoteIndex.getKey(), EsExecutors.DIRECT_EXECUTOR_SERVICE); + ParentTaskAssigningClient remoteClient = new ParentTaskAssigningClient( + client.getRemoteClusterClient(remoteIndex.getKey(), EsExecutors.DIRECT_EXECUTOR_SERVICE), + client.getParentTask() + ); getCheckpointsFromOneCluster( remoteClient, + timeout, transformConfig.getHeaders(), remoteIndex.getValue().toArray(new String[0]), remoteIndex.getKey(), @@ -148,16 +159,17 @@ protected void getIndexCheckpoints(ActionListener> listener) } private void getCheckpointsFromOneCluster( - Client client, + ParentTaskAssigningClient client, + TimeValue timeout, Map headers, String[] indices, String cluster, ActionListener> listener ) { if (fallbackToBWC.contains(cluster)) { - getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener); + getCheckpointsFromOneClusterBWC(client, timeout, headers, indices, cluster, listener); } else { - getCheckpointsFromOneClusterV2(client, headers, indices, cluster, ActionListener.wrap(response -> { + getCheckpointsFromOneClusterV2(client, timeout, headers, indices, cluster, ActionListener.wrap(response -> { logger.debug( "[{}] Successfully retrieved checkpoints from cluster [{}] using transform 
checkpoint API", transformConfig.getId(), @@ -175,7 +187,7 @@ private void getCheckpointsFromOneCluster( ); fallbackToBWC.add(cluster); - getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener); + getCheckpointsFromOneClusterBWC(client, timeout, headers, indices, cluster, listener); } else { listener.onFailure(e); } @@ -184,14 +196,18 @@ private void getCheckpointsFromOneCluster( } private static void getCheckpointsFromOneClusterV2( - Client client, + ParentTaskAssigningClient client, + TimeValue timeout, Map headers, String[] indices, String cluster, ActionListener> listener ) { - GetCheckpointAction.Request getCheckpointRequest = new GetCheckpointAction.Request(indices, IndicesOptions.LENIENT_EXPAND_OPEN); - + GetCheckpointAction.Request getCheckpointRequest = new GetCheckpointAction.Request( + indices, + IndicesOptions.LENIENT_EXPAND_OPEN, + timeout + ); ActionListener checkpointListener; if (RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY.equals(cluster)) { checkpointListener = ActionListener.wrap( @@ -230,7 +246,8 @@ private static void getCheckpointsFromOneClusterV2( * BWC fallback for nodes/cluster older than 8.2 */ private static void getCheckpointsFromOneClusterBWC( - Client client, + ParentTaskAssigningClient client, + TimeValue timeout, Map headers, String[] indices, String cluster, @@ -256,7 +273,7 @@ private static void getCheckpointsFromOneClusterBWC( client, ClientHelper.TRANSFORM_ORIGIN, IndicesStatsAction.INSTANCE, - new IndicesStatsRequest().indices(indices).clear().indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN), + new IndicesStatsRequest().indices(indices).timeout(timeout).clear().indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN), ActionListener.wrap(response -> { if (response.getFailedShards() != 0) { for (int i = 0; i < response.getShardFailures().length; ++i) { @@ -347,6 +364,7 @@ public void getCheckpointingInfo( TransformCheckpoint nextCheckpoint, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ) { TransformCheckpointingInfo.TransformCheckpointingInfoBuilder checkpointingInfoBuilder = @@ -359,7 +377,7 @@ public void getCheckpointingInfo( long timestamp = clock.millis(); - getIndexCheckpoints(ActionListener.wrap(checkpointsByIndex -> { + getIndexCheckpoints(timeout, ActionListener.wrap(checkpointsByIndex -> { TransformCheckpoint sourceCheckpoint = new TransformCheckpoint(transformConfig.getId(), timestamp, -1L, checkpointsByIndex, 0L); checkpointingInfoBuilder.setSourceCheckpoint(sourceCheckpoint); checkpointingInfoBuilder.setOperationsBehind(TransformCheckpoint.getBehind(lastCheckpoint, sourceCheckpoint)); @@ -372,6 +390,7 @@ public void getCheckpointingInfo( long lastCheckpointNumber, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ) { @@ -398,7 +417,7 @@ public void getCheckpointingInfo( // <2> got the next checkpoint, get the source checkpoint ActionListener nextCheckpointListener = ActionListener.wrap(nextCheckpointObj -> { checkpointingInfoBuilder.setNextCheckpoint(nextCheckpointObj); - getIndexCheckpoints(checkpointsByIndexListener); + getIndexCheckpoints(timeout, checkpointsByIndexListener); }, e -> { logger.debug( () -> format("[%s] failed to retrieve next checkpoint [%s]", transformConfig.getId(), lastCheckpointNumber + 1), @@ -420,7 +439,7 @@ public void getCheckpointingInfo( if (lastCheckpointNumber != 0) { 
transformConfigManager.getTransformCheckpoint(transformConfig.getId(), lastCheckpointNumber, lastCheckpointListener); } else { - getIndexCheckpoints(checkpointsByIndexListener); + getIndexCheckpoints(timeout, checkpointsByIndexListener); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index a30f3269ca96f..7b83af1dc1405 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -13,7 +13,7 @@ import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -45,7 +45,7 @@ class TimeBasedCheckpointProvider extends DefaultCheckpointProvider { TimeBasedCheckpointProvider( final Clock clock, - final Client client, + final ParentTaskAssigningClient client, final RemoteClusterResolver remoteClusterResolver, final TransformConfigManager transformConfigManager, final TransformAuditor transformAuditor, @@ -95,7 +95,7 @@ public void createNextCheckpoint(final TransformCheckpoint lastCheckpoint, final // for time based synchronization final long timeUpperBound = alignTimestamp.apply(timestamp - timeSyncConfig.getDelay().millis()); - getIndexCheckpoints(ActionListener.wrap(checkpointsByIndex -> { + getIndexCheckpoints(INTERNAL_GET_INDEX_CHECKPOINTS_TIMEOUT, ActionListener.wrap(checkpointsByIndex -> { listener.onResponse( new TransformCheckpoint(transformConfig.getId(), timestamp, checkpoint, checkpointsByIndex, timeUpperBound) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointService.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointService.java index 4527b6039bcad..0006a79b6a2b8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointService.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointService.java @@ -10,9 +10,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; @@ -53,7 +54,7 @@ public TransformCheckpointService( this.remoteClusterResolver = new RemoteClusterResolver(settings, clusterService.getClusterSettings()); } - public CheckpointProvider 
getCheckpointProvider(final Client client, final TransformConfig transformConfig) { + public CheckpointProvider getCheckpointProvider(final ParentTaskAssigningClient client, final TransformConfig transformConfig) { if (transformConfig.getSyncConfig() instanceof TimeSyncConfig) { return new TimeBasedCheckpointProvider( clock, @@ -85,7 +86,8 @@ public CheckpointProvider getCheckpointProvider(final Client client, final Trans * @param listener listener to retrieve the result */ public void getCheckpointingInfo( - final Client client, + final ParentTaskAssigningClient client, + final TimeValue timeout, final String transformId, final long lastCheckpointNumber, final TransformIndexerPosition nextCheckpointPosition, @@ -99,6 +101,7 @@ public void getCheckpointingInfo( lastCheckpointNumber, nextCheckpointPosition, nextCheckpointProgress, + timeout, listener ); }, transformError -> { diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 00fa7f200a3c3..45bef4650640a 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -27,7 +27,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.master.AcknowledgedRequest; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; @@ -75,7 +75,7 @@ class ClientTransformIndexer extends TransformIndexer { private static final TimeValue PIT_KEEP_ALIVE = TimeValue.timeValueSeconds(30); private static final Logger logger = LogManager.getLogger(ClientTransformIndexer.class); - private final Client client; + private final ParentTaskAssigningClient client; private final AtomicBoolean oldStatsCleanedUp = new AtomicBoolean(false); private final AtomicReference seqNoPrimaryTermAndIndexHolder; @@ -89,7 +89,7 @@ class ClientTransformIndexer extends TransformIndexer { CheckpointProvider checkpointProvider, AtomicReference initialState, TransformIndexerPosition initialPosition, - Client client, + ParentTaskAssigningClient client, TransformIndexerStats initialStats, TransformConfig transformConfig, TransformProgress transformProgress, diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java index 9294aef87526d..cf6c50ec60faf 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformIndexer.java @@ -218,10 +218,6 @@ public TransformCheckpoint getNextCheckpoint() { return nextCheckpoint; } - public CheckpointProvider getCheckpointProvider() { - return checkpointProvider; - } - /** * Request a checkpoint */ diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java 
b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java index 0d2ce26363298..6a8a8c8548491 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutor.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -181,6 +182,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa final String transformId = params.getId(); final TransformTask buildTask = (TransformTask) task; + final ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, buildTask.getParentTaskId()); // NOTE: TransformPersistentTasksExecutor#createTask pulls in the stored task state from the ClusterState when the object // is created. TransformTask#ctor takes into account setting the task as failed if that is passed in with the // persisted state. @@ -189,7 +191,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa // // We want the rest of the state to be populated in the task when it is loaded on the node so that users can force start it again // later if they want. - final ClientTransformIndexerBuilder indexerBuilder = new ClientTransformIndexerBuilder().setClient(buildTask.getParentTaskClient()) + final ClientTransformIndexerBuilder indexerBuilder = new ClientTransformIndexerBuilder().setClient(parentTaskClient) .setTransformServices(transformServices); final SetOnce stateHolder = new SetOnce<>(); @@ -346,7 +348,7 @@ protected void nodeOperation(AllocatedPersistentTask task, @Nullable TransformTa // <1> Check the latest internal index (IMPORTANT: according to _this_ node, which might be newer than master) is installed TransformInternalIndex.createLatestVersionedIndexIfRequired( clusterService, - buildTask.getParentTaskClient(), + parentTaskClient, transformInternalIndexAdditionalSettings, templateCheckListener ); @@ -420,7 +422,6 @@ protected AllocatedPersistentTask createTask( type, action, parentTaskId, - client, persistentTask.getParams(), (TransformState) persistentTask.getState(), transformServices.getScheduler(), diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index c2affd3881e27..753d61410d5a8 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -12,10 +12,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ListenerTimeouts; -import org.elasticsearch.client.internal.Client; import 
org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; @@ -63,7 +60,6 @@ public class TransformTask extends AllocatedPersistentTask implements TransformS private static final Logger logger = LogManager.getLogger(TransformTask.class); private static final IndexerState[] RUNNING_STATES = new IndexerState[] { IndexerState.STARTED, IndexerState.INDEXING }; - private final ParentTaskAssigningClient parentTaskClient; private final TransformTaskParams transform; private final TransformScheduler transformScheduler; private final ThreadPool threadPool; @@ -79,7 +75,6 @@ public TransformTask( String type, String action, TaskId parentTask, - Client client, TransformTaskParams transform, TransformState state, TransformScheduler transformScheduler, @@ -88,7 +83,6 @@ public TransformTask( Map headers ) { super(id, type, action, TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + transform.getId(), parentTask, headers); - this.parentTaskClient = new ParentTaskAssigningClient(client, parentTask); this.transform = transform; this.transformScheduler = transformScheduler; this.threadPool = threadPool; @@ -125,10 +119,6 @@ public TransformTask( } } - public ParentTaskAssigningClient getParentTaskClient() { - return parentTaskClient; - } - public String getTransformId() { return transform.getId(); } @@ -183,32 +173,25 @@ public TransformIndexerStats getStats() { public void getCheckpointingInfo( TransformCheckpointService transformsCheckpointService, + ParentTaskAssigningClient parentTaskClient, ActionListener listener, TimeValue timeout ) { - ActionListener checkPointInfoListener = ListenerTimeouts.wrapWithTimeout( - threadPool, - timeout, - threadPool.generic(), - ActionListener.wrap(infoBuilder -> { - if (context.getChangesLastDetectedAt() != null) { - infoBuilder.setChangesLastDetectedAt(context.getChangesLastDetectedAt()); - } - if (context.getLastSearchTime() != null) { - infoBuilder.setLastSearchTime(context.getLastSearchTime()); - } - listener.onResponse(infoBuilder.build()); - }, listener::onFailure), - (ignore) -> listener.onFailure( - new ElasticsearchTimeoutException(format("Timed out retrieving checkpointing info after [%s]", timeout)) - ) - ); + ActionListener checkPointInfoListener = ActionListener.wrap(infoBuilder -> { + if (context.getChangesLastDetectedAt() != null) { + infoBuilder.setChangesLastDetectedAt(context.getChangesLastDetectedAt()); + } + if (context.getLastSearchTime() != null) { + infoBuilder.setLastSearchTime(context.getLastSearchTime()); + } + listener.onResponse(infoBuilder.build()); + }, listener::onFailure); - // TODO: pass `timeout` to the lower layers ClientTransformIndexer transformIndexer = getIndexer(); if (transformIndexer == null) { transformsCheckpointService.getCheckpointingInfo( parentTaskClient, + timeout, transform.getId(), context.getCheckpoint(), initialPosition, @@ -217,12 +200,13 @@ public void getCheckpointingInfo( ); return; } - transformIndexer.getCheckpointProvider() + transformsCheckpointService.getCheckpointProvider(parentTaskClient, transformIndexer.getConfig()) .getCheckpointingInfo( transformIndexer.getLastCheckpoint(), transformIndexer.getNextCheckpoint(), transformIndexer.getPosition(), transformIndexer.getProgress(), + timeout, checkPointInfoListener ); } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java new file mode 100644 index 0000000000000..25c7f9efa7992 --- /dev/null +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeActionTests.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.action; + +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskCancelHelper; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; +import org.elasticsearch.xpack.transform.transforms.scheduling.FakeClock; +import org.junit.Before; + +import java.time.Duration; +import java.time.Instant; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_VERSION_CREATED; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransportGetCheckpointNodeActionTests extends ESTestCase { + + private static final String NODE_NAME = "dummy-node"; + + private IndicesService indicesService; + private CancellableTask task; + private FakeClock clock; + private Set<ShardId> shards; + + @Before + public void setUp() throws Exception { + super.setUp(); + ClusterService clusterService = new ClusterService( + Settings.builder().put("node.name", NODE_NAME).build(), + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + null, + (TaskManager) null + ); + IndexShard indexShardA0 = mock(IndexShard.class); + when(indexShardA0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_000)); + IndexShard indexShardA1 = mock(IndexShard.class); + when(indexShardA1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 3_001)); + IndexShard indexShardB0 = mock(IndexShard.class); + when(indexShardB0.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_000)); + IndexShard indexShardB1 = mock(IndexShard.class); + when(indexShardB1.seqNoStats()).thenReturn(new SeqNoStats(3_000, 2_000, 4_001)); + Settings commonIndexSettings = Settings.builder() + .put(SETTING_VERSION_CREATED, 1_000_000) + .put(SETTING_NUMBER_OF_SHARDS, 2) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + IndexService indexServiceA = mock(IndexService.class); + when(indexServiceA.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-A").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceA.getShard(0)).thenReturn(indexShardA0); + when(indexServiceA.getShard(1)).thenReturn(indexShardA1); + IndexService indexServiceB = mock(IndexService.class); + when(indexServiceB.getIndexSettings()).thenReturn( + new IndexSettings(IndexMetadata.builder("my-index-B").settings(commonIndexSettings).build(), Settings.EMPTY) + ); + when(indexServiceB.getShard(0)).thenReturn(indexShardB0); + when(indexServiceB.getShard(1)).thenReturn(indexShardB1); + indicesService = mock(IndicesService.class); + when(indicesService.clusterService()).thenReturn(clusterService); + when(indicesService.indexServiceSafe(new Index("my-index-A", "A"))).thenReturn(indexServiceA); + when(indicesService.indexServiceSafe(new Index("my-index-B", "B"))).thenReturn(indexServiceB); + + task = new CancellableTask(123, "type", "action", "description", new TaskId("dummy-node:456"), Map.of()); + clock = new FakeClock(Instant.now()); + shards = Set.of( + new ShardId(new Index("my-index-A", "A"), 0), + new ShardId(new Index("my-index-A", "A"), 1), + new ShardId(new Index("my-index-B", "B"), 0), + new ShardId(new Index("my-index-B", "B"), 1) + ); + } + + public void testGetGlobalCheckpointsWithNoTimeout() throws InterruptedException { + testGetGlobalCheckpointsSuccess(null); + } + + public void testGetGlobalCheckpointsWithHighTimeout() throws InterruptedException { + testGetGlobalCheckpointsSuccess(TimeValue.timeValueMinutes(1)); + } + + private void testGetGlobalCheckpointsSuccess(TimeValue timeout) throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + SetOnce<GetCheckpointNodeAction.Response> responseHolder = new SetOnce<>(); + SetOnce<Exception> exceptionHolder = new SetOnce<>(); + TransportGetCheckpointNodeAction.getGlobalCheckpoints(indicesService, task, shards, timeout, clock, ActionListener.wrap(r -> { + responseHolder.set(r); + latch.countDown(); + }, e -> { + exceptionHolder.set(e); + latch.countDown(); + })); + latch.await(10, TimeUnit.SECONDS); + + Map<String, long[]> checkpoints = responseHolder.get().getCheckpoints(); + assertThat(checkpoints.keySet(), containsInAnyOrder("my-index-A", "my-index-B")); + assertThat(LongStream.of(checkpoints.get("my-index-A")).boxed().collect(Collectors.toList()), contains(3000L, 3001L)); + assertThat(LongStream.of(checkpoints.get("my-index-B")).boxed().collect(Collectors.toList()), contains(4000L, 4001L)); + assertThat(exceptionHolder.get(), is(nullValue())); + } + + public void testGetGlobalCheckpointsFailureDueToTaskCancelled() throws InterruptedException { + TaskCancelHelper.cancel(task, "due to apocalypse"); + + CountDownLatch latch = new CountDownLatch(1); + SetOnce<GetCheckpointNodeAction.Response> responseHolder = new SetOnce<>(); + SetOnce<Exception> exceptionHolder = new SetOnce<>(); + TransportGetCheckpointNodeAction.getGlobalCheckpoints(indicesService, task, shards, null, clock, ActionListener.wrap(r -> { + responseHolder.set(r); + latch.countDown(); + }, e -> { + exceptionHolder.set(e); + latch.countDown(); + })); + latch.await(10, TimeUnit.SECONDS); + + assertThat("Response was: " + responseHolder.get(), responseHolder.get(), is(nullValue())); + assertThat(exceptionHolder.get().getMessage(), is(equalTo("task cancelled [due to apocalypse]"))); + } + + public void testGetGlobalCheckpointsFailureDueToTimeout() throws InterruptedException { + // Move the current time past the timeout. + clock.advanceTimeBy(Duration.ofSeconds(10)); + + CountDownLatch latch = new CountDownLatch(1); + SetOnce<GetCheckpointNodeAction.Response> responseHolder = new SetOnce<>(); + SetOnce<Exception> exceptionHolder = new SetOnce<>(); + TransportGetCheckpointNodeAction.getGlobalCheckpoints( + indicesService, + task, + shards, + TimeValue.timeValueSeconds(5), + clock, + ActionListener.wrap(r -> { + responseHolder.set(r); + latch.countDown(); + }, e -> { + exceptionHolder.set(e); + latch.countDown(); + }) + ); + latch.await(10, TimeUnit.SECONDS); + + assertThat("Response was: " + responseHolder.get(), responseHolder.get(), is(nullValue())); + assertThat( + exceptionHolder.get().getMessage(), + is(equalTo("Transform checkpointing timed out on node [dummy-node] after [5s] having processed [0] of [4] shards")) + ); + } +} diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java index 5d05d89aacdce..2457e2719c0ee 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProviderTests.java @@ -19,11 +19,13 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.MockLogAppender.LoggingExpectation; @@ -68,6 +70,7 @@ public class DefaultCheckpointProviderTests extends ESTestCase { private Clock clock; private Client client; + private ParentTaskAssigningClient parentTaskClient; private Client remoteClient1; private Client remoteClient2; private Client remoteClient3; @@ -81,6 +84,7 @@ public void setUpMocks() { when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); + parentTaskClient = new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")); remoteClient1 = mock(Client.class); when(remoteClient1.threadPool()).thenReturn(threadPool); remoteClient2 = mock(Client.class); @@ -253,7 +257,7 @@ public void testHandlingShardFailures() throws Exception { DefaultCheckpointProvider provider = new DefaultCheckpointProvider( clock, - client, + parentTaskClient,
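The timeout test above passes because getGlobalCheckpoints consults the injected clock against the supplied timeout: the fake clock has already been advanced 10 seconds past a 5-second timeout before any shard is processed, so the action fails on its first deadline check with [0] of [4] shards done. The method body is not part of this diff; the sketch below is only a plausible shape for that check, and DeadlineSketch/checkDeadline are hypothetical names.

```java
import java.time.Clock;
import java.time.Instant;

// Hypothetical illustration, not the actual TransportGetCheckpointNodeAction code:
// a deadline check driven by an injected java.time.Clock, producing the message
// format asserted in testGetGlobalCheckpointsFailureDueToTimeout (the real code
// would format the timeout via TimeValue rather than seconds arithmetic).
final class DeadlineSketch {
    static String checkDeadline(Clock clock, Instant start, long timeoutSeconds, String node, int processed, int total) {
        long elapsedMillis = clock.millis() - start.toEpochMilli();
        if (elapsedMillis > timeoutSeconds * 1000) {
            return "Transform checkpointing timed out on node ["
                + node + "] after [" + timeoutSeconds + "s] having processed ["
                + processed + "] of [" + total + "] shards";
        }
        return null; // still within the deadline
    }
}
```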
remoteClusterResolver, transformConfigManager, transformAuditor, @@ -370,7 +374,7 @@ public void testCreateNextCheckpointWithRemoteClients() throws InterruptedExcept DefaultCheckpointProvider provider = new DefaultCheckpointProvider( clock, - client, + parentTaskClient, remoteClusterResolver, transformConfigManager, transformAuditor, @@ -397,7 +401,7 @@ public void testCreateNextCheckpointWithRemoteClients() throws InterruptedExcept private DefaultCheckpointProvider newCheckpointProvider(TransformConfig transformConfig) { return new DefaultCheckpointProvider( clock, - client, + parentTaskClient, new RemoteClusterResolver(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), transformConfigManager, transformAuditor, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/MockTimebasedCheckpointProvider.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/MockTimebasedCheckpointProvider.java index b874cbf9140b5..79aa34eebc882 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/MockTimebasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/MockTimebasedCheckpointProvider.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.transform.checkpoint; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder; @@ -65,6 +66,7 @@ public void getCheckpointingInfo( TransformCheckpoint nextCheckpoint, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ) { TransformCheckpointingInfoBuilder checkpointingInfoBuilder = new TransformCheckpointingInfoBuilder(); @@ -85,6 +87,7 @@ public void getCheckpointingInfo( long lastCheckpointNumber, TransformIndexerPosition nextCheckpointPosition, TransformProgress nextCheckpointProgress, + TimeValue timeout, ActionListener listener ) { long timestamp = System.currentTimeMillis(); @@ -106,7 +109,7 @@ public void getCheckpointingInfo( timestamp - timeSyncConfig.getDelay().millis() ); - getCheckpointingInfo(lastCheckpoint, nextCheckpoint, nextCheckpointPosition, nextCheckpointProgress, listener); + getCheckpointingInfo(lastCheckpoint, nextCheckpoint, nextCheckpointPosition, nextCheckpointProgress, timeout, listener); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java index 05c1a372dc471..5f1c0e6bb7f76 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.settings.ClusterSettings; 
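All of the Client-to-ParentTaskAssigningClient swaps in this change follow one pattern, visible in TransformPersistentTasksExecutor#nodeOperation above: wrap the node client once with the transform task's ID so that every request sent through it is registered as a child of that task, and cancelling the transform task then propagates to in-flight child requests. A minimal sketch of the wrap (the classes are the ones imported in these files; the helper method itself is illustrative):

```java
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.client.internal.ParentTaskAssigningClient;
import org.elasticsearch.tasks.TaskId;

final class ClientWrappingSketch {
    // Every request issued through the returned client carries parentTaskId as
    // its parent task, so cancelling the parent reaches the child requests.
    static Client wrapForParentTask(Client client, TaskId parentTaskId) {
        return new ParentTaskAssigningClient(client, parentTaskId);
    }
}
```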
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -26,6 +27,7 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; @@ -71,6 +73,7 @@ public class TimeBasedCheckpointProviderTests extends ESTestCase { private Clock clock; private Client client; + private ParentTaskAssigningClient parentTaskClient; private IndexBasedTransformConfigManager transformConfigManager; private MockTransformAuditor transformAuditor; @@ -78,10 +81,11 @@ public class TimeBasedCheckpointProviderTests extends ESTestCase { public void setUpMocks() { clock = mock(Clock.class); when(clock.millis()).thenReturn(123456789L); - client = mock(Client.class); ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); + client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); + parentTaskClient = new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")); transformConfigManager = mock(IndexBasedTransformConfigManager.class); transformAuditor = MockTransformAuditor.createMockAuditor(); } @@ -278,7 +282,7 @@ private void testCreateNextCheckpoint( private TimeBasedCheckpointProvider newCheckpointProvider(TransformConfig transformConfig) { return new TimeBasedCheckpointProvider( clock, - client, + parentTaskClient, new RemoteClusterResolver(Settings.EMPTY, new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)), transformConfigManager, transformAuditor, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 09b46c72c2ead..a850c7beef7dd 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; @@ -34,6 +34,7 @@ import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -138,7 +139,7 @@ public void testPitInjection() throws InterruptedException { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -231,7 +232,7 @@ public void 
testPitInjectionIfPitNotSupported() throws InterruptedException { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -307,7 +308,7 @@ public void testDisablePit() throws InterruptedException { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -370,7 +371,7 @@ public void testDisablePitWhenThereIsRemoteIndexInSource() throws InterruptedExc mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -413,7 +414,7 @@ public void testDisablePitWhenThereIsRemoteIndexInSource() throws InterruptedExc public void testHandlePitIndexNotFound() throws InterruptedException { // simulate a deleted index due to ILM try (PitMockClient client = new PitMockClient(getTestName(), true)) { - ClientTransformIndexer indexer = createTestIndexer(client); + ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456"))); SearchRequest searchRequest = new SearchRequest("deleted-index"); searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id")); Tuple namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest); @@ -425,7 +426,7 @@ public void testHandlePitIndexNotFound() throws InterruptedException { // simulate a deleted index that is essential, search must fail (after a retry without pit) try (PitMockClient client = new PitMockClient(getTestName(), true)) { - ClientTransformIndexer indexer = createTestIndexer(client); + ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456"))); SearchRequest searchRequest = new SearchRequest("essential-deleted-index"); searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id")); Tuple namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest); @@ -444,7 +445,7 @@ private static class MockClientTransformIndexer extends ClientTransformIndexer { CheckpointProvider checkpointProvider, AtomicReference initialState, TransformIndexerPosition initialPosition, - Client client, + ParentTaskAssigningClient client, TransformIndexerStats initialStats, TransformConfig transformConfig, TransformProgress transformProgress, @@ -582,7 +583,7 @@ private ClientTransformIndexer createTestIndexer() { return createTestIndexer(null); } - private ClientTransformIndexer createTestIndexer(Client client) { + private ClientTransformIndexer createTestIndexer(ParentTaskAssigningClient client) { ThreadPool threadPool = mock(ThreadPool.class); when(threadPool.executor("generic")).thenReturn(mock(ExecutorService.class)); @@ -597,7 +598,7 @@ private ClientTransformIndexer createTestIndexer(Client client) { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client == null ? mock(Client.class) : client, + client == null ? 
mock(ParentTaskAssigningClient.class) : client, mock(TransformIndexerStats.class), TransformConfigTests.randomTransformConfig(), null, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index 424514b99f683..a460dca4a1b41 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.breaker.CircuitBreaker.Durability; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Settings; @@ -135,7 +136,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { checkpointProvider, initialState, initialPosition, - mock(Client.class), + mock(ParentTaskAssigningClient.class), jobStats, transformConfig, /* TransformProgress */ null, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java index afa10afbdf638..aeb94cd2c2f66 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java @@ -11,10 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -63,7 +65,7 @@ private static class MockClientTransformIndexer extends ClientTransformIndexer { CheckpointProvider checkpointProvider, AtomicReference initialState, TransformIndexerPosition initialPosition, - Client client, + ParentTaskAssigningClient client, TransformIndexerStats initialStats, TransformConfig transformConfig, TransformProgress transformProgress, @@ -220,7 +222,7 @@ public void fail(String failureMessage, ActionListener listener) { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -302,7 +304,7 @@ public void fail(String failureMessage, ActionListener listener) { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new 
TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, @@ -432,7 +434,7 @@ public void fail(String failureMessage, ActionListener listener) { mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), null, - client, + new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456")), mock(TransformIndexerStats.class), config, null, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index 2ee2ed051a7d2..4d5807913636d 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -129,7 +129,6 @@ public void testStopOnFailedTaskWithStoppedIndexer() { "some_type", "some_action", TaskId.EMPTY_TASK_ID, - client, createTransformTaskParams(transformConfig.getId()), transformState, new TransformScheduler(clock, threadPool, Settings.EMPTY), @@ -208,7 +207,6 @@ public void testStopOnFailedTaskWithoutIndexer() { "some_type", "some_action", TaskId.EMPTY_TASK_ID, - client, createTransformTaskParams(transformConfig.getId()), transformState, new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY), @@ -428,7 +426,6 @@ public void testApplyNewAuthState() { "some_type", "some_action", TaskId.EMPTY_TASK_ID, - client, createTransformTaskParams(transformConfig.getId()), transformState, new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/FakeClock.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/FakeClock.java index 670f88b5195b2..4af9b8cf0ede0 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/FakeClock.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/FakeClock.java @@ -16,11 +16,11 @@ /** * {@link FakeClock} class in a test implementation of {@link Clock} and provides the possibility to set arbitrary current time. 
*/ -class FakeClock extends Clock { +public class FakeClock extends Clock { private Instant currentTime; - FakeClock(Instant time) { + public FakeClock(Instant time) { currentTime = Objects.requireNonNull(time); } diff --git a/x-pack/plugin/vector-tile/qa/multi-cluster/build.gradle b/x-pack/plugin/vector-tile/qa/multi-cluster/build.gradle index ea20c8eacd172..30b032c1cae1a 100644 --- a/x-pack/plugin/vector-tile/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/vector-tile/qa/multi-cluster/build.gradle @@ -6,7 +6,7 @@ */ import org.elasticsearch.gradle.testclusters.DefaultTestClustersTask - +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.legacy-java-rest-test' dependencies { @@ -27,7 +27,7 @@ def localCluster = testClusters.register('local') { setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' setting 'cluster.remote.other.seeds', - { "\"${remoteCluster.get().getAllTransportPortURI().join(",")}\"" } + { "\"${remoteCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE } diff --git a/x-pack/plugin/vector-tile/src/javaRestTest/java/org/elasticsearch/xpack/vectortile/VectorTileRestIT.java b/x-pack/plugin/vector-tile/src/javaRestTest/java/org/elasticsearch/xpack/vectortile/VectorTileRestIT.java index 1cb77ec45199f..016bfaabec0ff 100644 --- a/x-pack/plugin/vector-tile/src/javaRestTest/java/org/elasticsearch/xpack/vectortile/VectorTileRestIT.java +++ b/x-pack/plugin/vector-tile/src/javaRestTest/java/org/elasticsearch/xpack/vectortile/VectorTileRestIT.java @@ -579,7 +579,6 @@ public void testInvalidAggName() { assertThat(ex.getMessage(), Matchers.containsString("Invalid aggregation name [_mvt_name]")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/101038") public void testCentroidGridTypeOnPolygon() throws Exception { final Request mvtRequest = new Request(getHttpMethod(), INDEX_POLYGON + "/_mvt/location/" + (z + 2) + "/" + 4 * x + "/" + 4 * y); mvtRequest.setJsonEntity("{\"size\" : 0, \"grid_type\": \"centroid\", \"grid_precision\": 2}"); diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/GridType.java b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/GridType.java index aa5d5a75fe4c0..517e68a2b7b08 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/GridType.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/GridType.java @@ -16,6 +16,9 @@ import java.io.IOException; import java.util.Locale; +import static org.elasticsearch.common.geo.GeoUtils.quantizeLatUp; +import static org.elasticsearch.common.geo.GeoUtils.quantizeLonDown; + /** * Enum containing the basic geometry types for serializing {@link InternalGeoGridBucket} */ @@ -42,8 +45,8 @@ public byte[] toFeature(GridAggregation gridAggregation, InternalGeoGridBucket b throws IOException { final Rectangle r = gridAggregation.toRectangle(key); final InternalGeoCentroid centroid = bucket.getAggregations().get(RestVectorTileAction.CENTROID_AGG_NAME); - final double featureLon = Math.min(Math.max(centroid.centroid().getX(), r.getMinLon()), r.getMaxLon()); - final double featureLat = Math.min(Math.max(centroid.centroid().getY(), r.getMinLat()), r.getMaxLat()); + final double featureLon = Math.min(Math.max(centroid.centroid().getX(), r.getMinLon()), quantizeLonDown(r.getMaxLon())); + final double featureLat = 
Math.min(Math.max(centroid.centroid().getY(), quantizeLatUp(r.getMinLat())), r.getMaxLat()); return featureFactory.point(featureLon, featureLat); } }; diff --git a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java index 1fee6eda5ad6d..df089381e70b2 100644 --- a/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java +++ b/x-pack/plugin/voting-only-node/src/main/java/org/elasticsearch/cluster/coordination/votingonly/VotingOnlyNodePlugin.java @@ -11,32 +11,22 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfiguration; import org.elasticsearch.cluster.coordination.CoordinationState.VoteCollection; import org.elasticsearch.cluster.coordination.ElectionStrategy; import org.elasticsearch.cluster.coordination.Join; import org.elasticsearch.cluster.coordination.PublicationTransportHandler; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.routing.allocation.AllocationService; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.DiscoveryModule; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ClusterCoordinationPlugin; import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.repositories.RepositoriesService; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; @@ -45,8 +35,6 @@ import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponseHandler; -import org.elasticsearch.watcher.ResourceWatcherService; -import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; @@ -84,23 +72,8 @@ public static boolean isFullMasterNode(DiscoveryNode discoveryNode) { } @Override - public Collection createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - 
AllocationService allocationService, - IndicesService indicesService - ) { - this.threadPool.set(threadPool); + public Collection createComponents(PluginServices services) { + this.threadPool.set(services.threadPool()); return Collections.emptyList(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 8df698d8150de..1ce21cae4aeeb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -48,7 +48,7 @@ public void testCanUseAnyConcreteIndexName() throws Exception { assertBusy(() -> timeWarp().trigger("mywatch")); assertBusy(() -> { - SearchResponse searchResult = client().prepareSearch(watchResultsIndex).setTrackTotalHits(true).get(); + SearchResponse searchResult = prepareSearch(watchResultsIndex).setTrackTotalHits(true).get(); assertThat((int) searchResult.getHits().getTotalHits().value, greaterThan(0)); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index cc0490664ec84..4f679742c6862 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -96,8 +96,7 @@ private void assertLatestHistoryEntry(String id, String expectedValue) throws Ex assertBusy(() -> { ensureGreen(HistoryStoreField.DATA_STREAM); refresh(HistoryStoreField.DATA_STREAM + "*"); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSize(1) + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(1) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))) .addSort(SortBuilders.fieldSort("result.execution_time").order(SortOrder.DESC)) .get(); @@ -115,8 +114,7 @@ private void assertTotalHistoryEntries(String id, long expectedCount) throws Exc assertBusy(() -> { // Watcher history is now written asynchronously, so we check this in an assertBusy ensureGreen(HistoryStoreField.DATA_STREAM); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSize(0) + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))) .get(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java index ee0cd616461c4..00c960e5631b7 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/email/EmailAttachmentTests.java @@ -194,10 +194,9 @@ public void 
testThatEmailAttachmentsAreSent() throws Exception { assertBusy(() -> { SearchResponse searchResponse; try { - searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setQuery(QueryBuilders.termQuery("watch_id", "_test_id")) - .execute() - .actionGet(); + searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setQuery( + QueryBuilders.termQuery("watch_id", "_test_id") + ).execute().actionGet(); } catch (SearchPhaseExecutionException e) { if (e.getCause() instanceof NoShardAvailableActionException) { // Nothing has created the index yet diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionSearchTests.java index cfa70942566ee..2a16bfe0b491e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/ArrayCompareConditionSearchTests.java @@ -44,9 +44,9 @@ public void testExecuteWithAggs() throws Exception { refresh(); - SearchResponse response = client().prepareSearch(index) - .addAggregation(AggregationBuilders.terms("top_tweeters").field("user.screen_name.keyword").size(3)) - .get(); + SearchResponse response = prepareSearch(index).addAggregation( + AggregationBuilders.terms("top_tweeters").field("user.screen_name.keyword").size(3) + ).get(); ArrayCompareCondition condition = new ArrayCompareCondition( "ctx.payload.aggregations.top_tweeters.buckets", @@ -80,8 +80,7 @@ public void testExecuteWithAggs() throws Exception { client().prepareIndex(index).setSource(source("fights_for_the_users", "you know, for the users", numberOfDocuments)).get(); refresh(); - response = client().prepareSearch(index) - .addAggregation(AggregationBuilders.terms("top_tweeters").field("user.screen_name.keyword").size(3)) + response = prepareSearch(index).addAggregation(AggregationBuilders.terms("top_tweeters").field("user.screen_name.keyword").size(3)) .get(); ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index ac728f59d5d3b..9f957e8dc959e 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -40,14 +40,12 @@ public void testExecuteWithAggs() throws Exception { client().prepareIndex("my-index").setSource("@timestamp", "2005-01-01T00:30").get(); refresh(); - SearchResponse response = client().prepareSearch("my-index") - .addAggregation( - AggregationBuilders.dateHistogram("rate") - .field("@timestamp") - .fixedInterval(DateHistogramInterval.HOUR) - .order(BucketOrder.count(false)) - ) - .get(); + SearchResponse response = prepareSearch("my-index").addAggregation( + AggregationBuilders.dateHistogram("rate") + .field("@timestamp") + .fixedInterval(DateHistogramInterval.HOUR) + .order(BucketOrder.count(false)) + ).get(); CompareCondition condition = new CompareCondition( 
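The changes throughout these watcher tests are a mechanical migration: client().prepareSearch(...) becomes the prepareSearch(...) helper inherited from ESIntegTestCase, which builds the same SearchRequestBuilder with less noise. A sketch of the equivalence, assuming the method sits in an ESIntegTestCase subclass (the test name is illustrative):

```java
// Assumes an ESIntegTestCase subclass, where client() and prepareSearch(...)
// are both inherited helpers; SearchResponse is
// org.elasticsearch.action.search.SearchResponse.
public void testPrepareSearchHelperEquivalence() {
    SearchResponse viaClient = client().prepareSearch("my-index").setSize(0).get();
    SearchResponse viaHelper = prepareSearch("my-index").setSize(0).get();
    assertEquals(viaClient.getHits().getTotalHits().value, viaHelper.getHits().getTotalHits().value);
}
```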
"ctx.payload.aggregations.rate.buckets.0.doc_count", @@ -66,14 +64,12 @@ public void testExecuteWithAggs() throws Exception { client().prepareIndex("my-index").setSource("@timestamp", "2005-01-01T00:40").get(); refresh(); - response = client().prepareSearch("my-index") - .addAggregation( - AggregationBuilders.dateHistogram("rate") - .field("@timestamp") - .fixedInterval(DateHistogramInterval.HOUR) - .order(BucketOrder.count(false)) - ) - .get(); + response = prepareSearch("my-index").addAggregation( + AggregationBuilders.dateHistogram("rate") + .field("@timestamp") + .fixedInterval(DateHistogramInterval.HOUR) + .order(BucketOrder.count(false)) + ).get(); ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); result = condition.execute(ctx); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 6324b9fd9dc58..b0e71ecfa3189 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -95,15 +95,13 @@ public void testEmailFields() throws Exception { // the action should fail as no email server is available assertWatchWithMinimumActionsCount("_id", ExecutionState.EXECUTED, 1); - SearchResponse response = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSource( - searchSource().aggregation(terms("from").field("result.actions.email.message.from")) - .aggregation(terms("to").field("result.actions.email.message.to")) - .aggregation(terms("cc").field("result.actions.email.message.cc")) - .aggregation(terms("bcc").field("result.actions.email.message.bcc")) - .aggregation(terms("reply_to").field("result.actions.email.message.reply_to")) - ) - .get(); + SearchResponse response = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource( + searchSource().aggregation(terms("from").field("result.actions.email.message.from")) + .aggregation(terms("to").field("result.actions.email.message.to")) + .aggregation(terms("cc").field("result.actions.email.message.cc")) + .aggregation(terms("bcc").field("result.actions.email.message.bcc")) + .aggregation(terms("reply_to").field("result.actions.email.message.reply_to")) + ).get(); assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index a5b6d8af85d98..36d2cf0239bdc 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -94,13 +94,11 @@ public void testHttpFields() throws Exception { // the action should fail as no email server is available assertWatchWithMinimumActionsCount("_id", ExecutionState.EXECUTED, 1); - SearchResponse response = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSource( - 
searchSource().aggregation(terms("input_result_path").field("result.input.http.request.path")) - .aggregation(terms("input_result_host").field("result.input.http.request.host")) - .aggregation(terms("webhook_path").field("result.actions.webhook.request.path")) - ) - .get(); + SearchResponse response = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource( + searchSource().aggregation(terms("input_result_path").field("result.input.http.request.path")) + .aggregation(terms("input_result_host").field("result.input.http.request.host")) + .aggregation(terms("webhook_path").field("result.actions.webhook.request.path")) + ).get(); assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); @@ -166,10 +164,7 @@ public void testExceptionMapping() throws Exception { assertBusy(() -> { // ensure watcher history index has been written with this id flushAndRefresh(HistoryStoreField.INDEX_PREFIX + "*"); - assertHitCount( - client().prepareSearch(HistoryStoreField.INDEX_PREFIX + "*").setQuery(QueryBuilders.termQuery("watch_id", id)), - 1L - ); + assertHitCount(prepareSearch(HistoryStoreField.INDEX_PREFIX + "*").setQuery(QueryBuilders.termQuery("watch_id", id)), 1L); }); // ensure that enabled is set to false diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7b122d2507853..ecd3424f88139 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -48,9 +48,9 @@ public void testIndexActionFields() throws Exception { flush(); refresh(); - SearchResponse response = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSource(searchSource().aggregation(terms("index_action_indices").field("result.actions.index.response.index"))) - .get(); + SearchResponse response = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource( + searchSource().aggregation(terms("index_action_indices").field("result.actions.index.response.index")) + ).get(); assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 7cd5bce4372ec..4fba54f7e0208 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -64,13 +64,11 @@ public void testHttpFields() throws Exception { // the action should fail as no email server is available assertWatchWithMinimumActionsCount("_id", ExecutionState.EXECUTED, 1); - SearchResponse response = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setSource( - searchSource().aggregation(terms("input_search_type").field("result.input.search.request.search_type")) - 
.aggregation(terms("input_indices").field("result.input.search.request.indices")) - .aggregation(terms("input_body").field("result.input.search.request.body")) - ) - .get(); + SearchResponse response = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource( + searchSource().aggregation(terms("input_search_type").field("result.input.search.request.search_type")) + .aggregation(terms("input_indices").field("result.input.search.request.indices")) + .aggregation(terms("input_body").field("result.input.search.request.body")) + ).get(); assertThat(response, notNullValue()); assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java index 1f0f38ac5be5e..cec68468acf0d 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/input/chain/ChainIntegrationTests.java @@ -82,7 +82,7 @@ public void testChainedInputsAreWorking() throws Exception { public void assertWatchExecuted() { try { refresh(); - SearchResponse searchResponse = client().prepareSearch("my-index").get(); + SearchResponse searchResponse = prepareSearch("my-index").get(); assertHitCount(searchResponse, 1); assertThat(searchResponse.getHits().getAt(0).getSourceAsString(), containsString("the-most-awesome-index-ever")); } catch (IndexNotFoundException e) { diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 12ae292f8da2f..93741f8e48ea5 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -274,11 +274,7 @@ public void replaceWatcherIndexWithRandomlyNamedIndex(String originalIndexOrAlia newSettings.remove("index.creation_date"); newSettings.remove("index.version.created"); - CreateIndexResponse createIndexResponse = indicesAdmin().prepareCreate(to) - .setMapping(mapping.sourceAsMap()) - .setSettings(newSettings) - .get(); - assertTrue(createIndexResponse.isAcknowledged()); + assertAcked(indicesAdmin().prepareCreate(to).setMapping(mapping.sourceAsMap()).setSettings(newSettings)); ensureGreen(to); AtomicReference originalIndex = new AtomicReference<>(originalIndexOrAlias); @@ -315,12 +311,12 @@ protected long watchRecordCount(QueryBuilder query) { } protected long docCount(String index, SearchSourceBuilder source) { - SearchRequestBuilder builder = client().prepareSearch(index).setSource(source).setSize(0); + SearchRequestBuilder builder = prepareSearch(index).setSource(source).setSize(0); return builder.get().getHits().getTotalHits().value; } protected SearchResponse searchHistory(SearchSourceBuilder builder) { - return client().prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource(builder).get(); + return prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSource(builder).get(); } protected T getInstanceFromMaster(Class type) { @@ -371,8 +367,9 @@ 
@@ -371,8 +367,9 @@ protected void assertWatchWithMinimumPerformedActionsCount( } refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions( + IndicesOptions.lenientExpandOpen() + ) .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id()))) .get(); lastResponse.set(searchResponse); @@ -403,17 +400,16 @@ protected void assertWatchWithMinimumPerformedActionsCount( } protected SearchResponse searchWatchRecords(Consumer<SearchRequestBuilder> requestBuilderCallback) { - SearchRequestBuilder builder = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*"); + SearchRequestBuilder builder = prepareSearch(HistoryStoreField.DATA_STREAM + "*"); requestBuilderCallback.accept(builder); return builder.get(); } protected long findNumberOfPerformedActions(String watchName) { refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id()))) - .get(); + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions( + IndicesOptions.lenientExpandOpen() + ).setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", ExecutionState.EXECUTED.id()))).get(); return searchResponse.getHits().getTotalHits().value; } @@ -438,8 +434,9 @@ protected void assertWatchWithNoActionNeeded(final String watchName, final long assertThat(routingTable.allPrimaryShardsActive(), is(true)); } refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions( + IndicesOptions.lenientExpandOpen() + ) .setQuery( boolQuery().must(matchQuery("watch_id", watchName)) .must(matchQuery("state", ExecutionState.EXECUTION_NOT_NEEDED.id())) @@ -477,10 +474,9 @@ protected void assertWatchWithMinimumActionsCount(final String watchName, final } refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setIndicesOptions(IndicesOptions.lenientExpandOpen()) - .setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", recordState.id()))) - .get(); + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setIndicesOptions( + IndicesOptions.lenientExpandOpen() + ).setQuery(boolQuery().must(matchQuery("watch_id", watchName)).must(matchQuery("state", recordState.id()))).get(); assertThat( "could not find executed watch record", searchResponse.getHits().getTotalHits().value, diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java index 0acefafb50ca4..5f572b3646365 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BasicWatcherTests.java @@ -134,7 +134,7 @@ public void
testDeleteWatch() throws Exception { assertThat(deleteWatchResponse.isFound(), is(true)); refresh(); - assertHitCount(client().prepareSearch(Watch.INDEX).setSize(0), 0L); + assertHitCount(prepareSearch(Watch.INDEX).setSize(0), 0L); // Deleting the same watch for the second time deleteWatchResponse = new DeleteWatchRequestBuilder(client()).setId("_name").get(); @@ -236,7 +236,6 @@ public void testConditionSearchWithIndexedTemplate() throws Exception { ), XContentType.JSON ) - .get() ); Script template = new Script(ScriptType.STORED, null, "my-template", Collections.emptyMap()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 6c4a75929733f..c32246e33c571 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -193,7 +193,7 @@ public void testLoadExistingWatchesUponStartup() throws Exception { ); } bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); - assertHitCount(client().prepareSearch(Watch.INDEX).setSize(0), numWatches); + assertHitCount(prepareSearch(Watch.INDEX).setSize(0), numWatches); startWatcher(); @@ -318,12 +318,12 @@ private void assertSingleExecutionAndCompleteWatchHistory(final long numberOfWat assertThat(maxSize, equalTo(0L)); refresh(); - SearchResponse searchResponse = client().prepareSearch("output").get(); + SearchResponse searchResponse = prepareSearch("output").get(); assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); long successfulWatchExecutions = searchResponse.getHits().getTotalHits().value; // the watch history should contain entries for each triggered watch, which a few have been marked as not executed - SearchResponse historySearchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX + "*").setSize(10000).get(); + SearchResponse historySearchResponse = prepareSearch(HistoryStoreField.INDEX_PREFIX + "*").setSize(10000).get(); assertHitCount(historySearchResponse, expectedWatchHistoryCount); long notExecutedCount = Arrays.stream(historySearchResponse.getHits().getHits()) .filter(hit -> hit.getSourceAsMap().get("state").equals(ExecutionState.NOT_EXECUTED_ALREADY_QUEUED.id())) @@ -402,7 +402,7 @@ public void testWatchRecordSavedTwice() throws Exception { // but even then since the execution of the watch record is async it may take a little bit before // the actual documents are in the output index refresh(); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords).get(); + SearchResponse searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords).get(); assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java index 
3e176f396fabc..2332ef24ff5ef 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/HistoryIntegrationTests.java @@ -73,7 +73,7 @@ public void testThatHistoryIsWrittenWithChainedInput() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); - assertHitCount(client().prepareSearch(".watcher-history-*"), 1); + assertHitCount(prepareSearch(".watcher-history-*"), 1); }); } @@ -106,7 +106,7 @@ public void testFailedInputResultWithDotsInFieldNameGetsStored() throws Exceptio assertBusy(() -> { refresh(".watcher-history*"); - assertHitCount(client().prepareSearch(".watcher-history*").setSize(0), 1); + assertHitCount(prepareSearch(".watcher-history*").setSize(0), 1); }); // as fields with dots are allowed in 5.0 again, the mapping must be checked in addition @@ -151,7 +151,7 @@ public void testPayloadInputWithDotsInFieldNameWorks() throws Exception { assertBusy(() -> { refresh(".watcher-history*"); - assertHitCount(client().prepareSearch(".watcher-history*").setSize(0), 1); + assertHitCount(prepareSearch(".watcher-history*").setSize(0), 1); }); // as fields with dots are allowed in 5.0 again, the mapping must be checked in addition @@ -187,7 +187,7 @@ public void testThatHistoryContainsStatus() throws Exception { assertBusy(() -> { refresh(".watcher-history*"); - SearchResponse searchResponse = client().prepareSearch(".watcher-history*").setSize(1).get(); + SearchResponse searchResponse = prepareSearch(".watcher-history*").setSize(1).get(); assertHitCount(searchResponse, 1); SearchHit hit = searchResponse.getHits().getAt(0); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index 817b8a4d7ff90..5c4039566661a 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -50,7 +50,7 @@ public void testHistoryOnRejection() throws Exception { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); - SearchResponse searchResponse = client().prepareSearch(".watcher-history-*").get(); + SearchResponse searchResponse = prepareSearch(".watcher-history-*").get(); assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index de53486c235dd..be9b2da6e739c 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -67,7 +67,7 @@ public void testThatLoadingWithNonExistingIndexWorks() throws Exception { assertBusy(() -> { RefreshResponse refreshResponse = indicesAdmin().prepareRefresh(".watcher-history*").get(); assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); - 
SearchResponse searchResponse = client().prepareSearch(".watcher-history*").setSize(0).get(); + SearchResponse searchResponse = prepareSearch(".watcher-history*").setSize(0).get(); assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))); }, 30, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java index 82f302b6fb44d..83a3b175819bd 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchAckTests.java @@ -236,7 +236,7 @@ public void testAckWithRestart() throws Exception { assertThat(ackResponse.getStatus().actionStatus("_id").ackStatus().state(), is(ActionStatus.AckStatus.State.ACKED)); refresh("actions"); - long countAfterAck = client().prepareSearch("actions").setQuery(matchAllQuery()).get().getHits().getTotalHits().value; + long countAfterAck = prepareSearch("actions").setQuery(matchAllQuery()).get().getHits().getTotalHits().value; assertThat(countAfterAck, greaterThanOrEqualTo(1L)); restartWatcherRandomly(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4f5fabac81973..bb4fa3b12bab4 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -68,9 +68,7 @@ public void testWatchMetadata() throws Exception { refresh(); SearchResponse searchResponse; try { - searchResponse = client().prepareSearch(HistoryStoreField.DATA_STREAM + "*") - .setQuery(termQuery("metadata.foo", "bar")) - .get(); + searchResponse = prepareSearch(HistoryStoreField.DATA_STREAM + "*").setQuery(termQuery("metadata.foo", "bar")).get(); } catch (SearchPhaseExecutionException e) { if (e.getCause() instanceof NoShardAvailableActionException) { // Nothing has created the index yet diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 75489785dfaad..5ea85eb813982 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -141,13 +141,13 @@ public void testScriptTransform() throws Exception { assertWatchWithMinimumPerformedActionsCount("_id2", 1, false); refresh(); - SearchResponse response = client().prepareSearch("output1").get(); + SearchResponse response = prepareSearch("output1").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), 
equalTo("20")); - response = client().prepareSearch("output2").get(); + response = prepareSearch("output2").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); @@ -186,12 +186,12 @@ public void testSearchTransform() throws Exception { assertWatchWithMinimumPerformedActionsCount("_id2", 1, false); refresh(); - SearchResponse response = client().prepareSearch("output1").get(); + SearchResponse response = prepareSearch("output1").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); - response = client().prepareSearch("output2").get(); + response = prepareSearch("output2").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); @@ -225,13 +225,13 @@ public void testChainTransform() throws Exception { assertWatchWithMinimumPerformedActionsCount("_id2", 1, false); refresh(); - SearchResponse response = client().prepareSearch("output1").get(); + SearchResponse response = prepareSearch("output1").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); - response = client().prepareSearch("output2").get(); + response = prepareSearch("output2").get(); assertNoFailures(response); assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java index 3cbf38fb8af4e..e48c4efd32b0d 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transport/action/delete/DeleteWatchTests.java @@ -81,9 +81,7 @@ public void testWatchDeletionDuringExecutionWorks() throws Exception { // during execution refresh(HistoryStoreField.INDEX_PREFIX + "*"); - SearchResponse searchResponse = client().prepareSearch(HistoryStoreField.INDEX_PREFIX + "*") - .setQuery(matchAllQuery()) - .get(); + SearchResponse searchResponse = prepareSearch(HistoryStoreField.INDEX_PREFIX + "*").setQuery(matchAllQuery()).get(); assertHitCount(searchResponse, 1); Map source = searchResponse.getHits().getAt(0).getSourceAsMap(); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java index e5fd1fcbf2035..a46b42e2153bd 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java @@ -24,10 +24,8 @@ import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; import 
org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -39,26 +37,21 @@ import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; -import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; -import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.TemplateScript; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.FixedExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.XPackPlugin; @@ -305,26 +298,18 @@ protected Clock getClock() { } @Override - public Collection<Object> createComponents( - Client client, - ClusterService clusterService, - ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, - ScriptService scriptService, - NamedXContentRegistry xContentRegistry, - Environment environment, - NodeEnvironment nodeEnvironment, - NamedWriteableRegistry namedWriteableRegistry, - IndexNameExpressionResolver expressionResolver, - Supplier<RepositoriesService> repositoriesServiceSupplier, - TelemetryProvider telemetryProvider, - AllocationService allocationService, - IndicesService indicesService - ) { + public Collection<?> createComponents(PluginServices services) { if (enabled == false) { return Collections.emptyList(); } + Client client = services.client(); + ClusterService clusterService = services.clusterService(); + ThreadPool threadPool = services.threadPool(); + Environment environment = services.environment(); + ScriptService scriptService = services.scriptService(); + NamedXContentRegistry xContentRegistry = services.xContentRegistry(); + // only initialize these classes if Watcher is enabled, and only after the plugin security policy for Watcher is in place BodyPartSource.init(); Account.init();
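The Watcher.java hunk above follows the plugin API's move from a fourteen-argument createComponents signature to a single PluginServices parameter object. A rough sketch of that pattern with a simplified stand-in interface; only the accessor names (client(), clusterService(), threadPool()) are taken from the hunk, everything else is illustrative:

    import java.util.Collection;
    import java.util.List;

    interface ServicesSketch {           // stand-in for Plugin.PluginServices
        Object client();
        Object clusterService();
        Object threadPool();
    }

    class PluginSketch {
        // Core can grow new services without touching this signature, so a plugin
        // that does not care about a new service never needs to change.
        Collection<?> createComponents(ServicesSketch services) {
            Object client = services.client();       // pull only what is needed
            Object threadPool = services.threadPool();
            return List.of(client, threadPool);
        }
    }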
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java index e1f1933242414..d3af489a77a2a 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java @@ -14,8 +14,8 @@ import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.indices.SystemIndexDescriptor; import org.elasticsearch.indices.TestIndexNameExpressionResolver; +import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.internal.DocumentParsingObserver; -import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.threadpool.ExecutorBuilder; @@ -74,10 +74,7 @@ public void testWatcherDisabledTests() throws Exception { watcher.onIndexModule(indexModule); // also no component creation if not enabled - assertThat( - watcher.createComponents(null, null, null, null, null, null, null, null, null, null, null, TelemetryProvider.NOOP, null, null), - hasSize(0) - ); + assertThat(watcher.createComponents(mock(Plugin.PluginServices.class)), hasSize(0)); watcher.close(); } diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 54adb26f7ba69..ec206c64a2371 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -52,6 +52,7 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.LowercaseNormalizer; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -59,6 +60,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.StringBinaryIndexFieldData; import org.elasticsearch.index.mapper.BinaryFieldMapper.CustomBinaryDocValuesField; +import org.elasticsearch.index.mapper.BlockDocValuesReader; +import org.elasticsearch.index.mapper.BlockLoader; import org.elasticsearch.index.mapper.DocumentParserContext; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -266,7 +269,7 @@ public static final class WildcardFieldType extends MappedFieldType { private WildcardFieldType(String name, String nullValue, int ignoreAbove, IndexVersion version, Map<String, String> meta) { super(name, true, false, true, Defaults.TEXT_SEARCH_INFO, meta); - if (version.onOrAfter(IndexVersion.V_7_10_0)) { + if (version.onOrAfter(IndexVersions.V_7_10_0)) { this.analyzer = WILDCARD_ANALYZER_7_10; } else { this.analyzer = WILDCARD_ANALYZER_7_9; @@ -850,6 +853,16 @@ public Query termsQuery(Collection<?> values, SearchExecutionContext context) { return new ConstantScoreQuery(bq.build()); } + @Override + public BlockLoader blockLoader(BlockLoaderContext blContext) { + if (hasDocValues()) { + // TODO it'd almost certainly be faster to drop directly to doc values like we do with keyword but this'll do for now + IndexFieldData<?> fd = new StringBinaryIndexFieldData(name(), CoreValuesSourceType.KEYWORD, null); + return BlockDocValuesReader.bytesRefsFromDocValues(context -> fd.load(context).getBytesValues()); + } + return null; + } + @Override public IndexFieldData.Builder fielddataBuilder(FieldDataContext fieldDataContext) { failIfNoDocValues();
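The WildcardFieldMapper.java hunk above gives wildcard fields a block loader: when the field has doc values, columnar readers can pull BytesRef values straight from the binary doc values; when it does not, the override returns null and the caller falls back to another strategy. A self-contained sketch of that dispatch, using stand-in types rather than the real BlockLoader and BlockDocValuesReader interfaces:

    import java.util.function.Supplier;

    interface LoaderSketch {}            // stand-in for BlockLoader

    final class WildcardBlockLoaderSketch {
        private final boolean hasDocValues;
        private final Supplier<LoaderSketch> docValuesBackedLoader;

        WildcardBlockLoaderSketch(boolean hasDocValues, Supplier<LoaderSketch> docValuesBackedLoader) {
            this.hasDocValues = hasDocValues;
            this.docValuesBackedLoader = docValuesBackedLoader;
        }

        // Mirrors the override above: prefer the doc-values-backed reader; null
        // means "no loader here", letting the caller fall back (e.g. to _source).
        LoaderSketch blockLoader() {
            return hasDocValues ? docValuesBackedLoader.get() : null;
        }
    }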
diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 79727b9279a98..a17cb7474a681 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -50,6 +50,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -82,6 +83,7 @@ import java.util.HashSet; import java.util.List; import java.util.function.BiFunction; +import java.util.function.Function; import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.equalTo; @@ -125,7 +127,7 @@ public void setUp() throws Exception { builder.ignoreAbove(MAX_FIELD_LENGTH); wildcardFieldType = builder.build(MapperBuilderContext.root(false, false)); - Builder builder79 = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersion.V_7_9_0); + Builder builder79 = new WildcardFieldMapper.Builder(WILDCARD_FIELD_NAME, IndexVersions.V_7_9_0); wildcardFieldType79 = builder79.build(MapperBuilderContext.root(false, false)); org.elasticsearch.index.mapper.KeywordFieldMapper.Builder kwBuilder = new KeywordFieldMapper.Builder( @@ -1214,6 +1216,11 @@ protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) return new WildcardSyntheticSourceSupport(); } + @Override + protected Function<Object, Object> loadBlockExpected() { + return v -> ((BytesRef) v).utf8ToString(); + } + static class WildcardSyntheticSourceSupport implements SyntheticSourceSupport { private final Integer ignoreAbove = randomBoolean() ? null : between(10, 100); private final boolean allIgnored = ignoreAbove != null && rarely(); @@ -1223,7 +1230,11 @@ static class WildcardSyntheticSourceSupport implements SyntheticSourceSupport { public SyntheticSourceExample example(int maxValues) { if (randomBoolean()) { Tuple<String, String> v = generateValue(); + Object loadBlock = v.v2(); + if (ignoreAbove != null && v.v2().length() > ignoreAbove) { + loadBlock = null; + } - return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + return new SyntheticSourceExample(v.v1(), v.v2(), loadBlock, this::mapping); } List<Tuple<String, String>> values = randomList(1, maxValues, this::generateValue); List<String> in = values.stream().map(Tuple::v1).toList(); @@ -1238,9 +1249,11 @@ public SyntheticSourceExample example(int maxValues) { }); List<String> outList = new ArrayList<>(new HashSet<>(docValuesValues)); Collections.sort(outList); + List<String> outBlockList = List.copyOf(outList); + Object outBlockResult = outBlockList.size() == 1 ? outBlockList.get(0) : outBlockList; outList.addAll(outExtraValues); Object out = outList.size() == 1 ?
outList.get(0) : outList; - return new SyntheticSourceExample(in, out, this::mapping); + return new SyntheticSourceExample(in, out, outBlockResult, this::mapping); } private Tuple<String, String> generateValue() { diff --git a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java index 7a264d4b71ed5..39d9b7cef1d32 100644 --- a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java +++ b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java @@ -164,7 +164,7 @@ private void setUpDataStreamWriteDocsAndRollover(String dataStreamName, Settings null ) ) - ).actionGet() + ) ); assertAcked(client().execute(CreateDataStreamAction.INSTANCE, new CreateDataStreamAction.Request(dataStreamName)).actionGet()); diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 778af00403ad0..dda8d6a249bc4 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -8,6 +8,7 @@ dependencies { clusterModules project(':modules:ingest-common') clusterModules project(':modules:reindex') clusterModules project(':modules:analysis-common') + clusterModules project(':modules:health-shards-availability') clusterModules project(xpackModule('stack')) clusterModules project(xpackModule('ilm')) clusterModules project(xpackModule('mapper-constant-keyword')) diff --git a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java index 10d5dc23eed09..a0f5ba84fd355 100644 --- a/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java +++ b/x-pack/qa/core-rest-tests-with-security/src/yamlRestTest/java/org/elasticsearch/xpack/security/CoreWithSecurityClientYamlTestSuiteIT.java @@ -39,6 +39,7 @@ public class CoreWithSecurityClientYamlTestSuiteIT extends ESClientYamlSuiteTest .module("reindex") .module("wildcard") .module("analysis-common") + .module("health-shards-availability") .setting("xpack.security.enabled", "true") .setting("xpack.watcher.enabled", "false") .setting("xpack.ml.enabled", "false") diff --git a/x-pack/qa/kerberos-tests/build.gradle b/x-pack/qa/kerberos-tests/build.gradle index 30fac49096679..62d6f0a1e34b8 100644 --- a/x-pack/qa/kerberos-tests/build.gradle +++ b/x-pack/qa/kerberos-tests/build.gradle @@ -12,6 +12,13 @@ dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('security')))) } +normalization { + runtimeClasspath { + ignore 'krb5.conf' + ignore '*.keytab' + } +} + tasks.register("copyKeytabToGeneratedResources", Copy) { from project(':test:fixtures:krb5kdc-fixture').ext.krb5Keytabs("peppa", "peppa.keytab") into "$buildDir/generated-resources/keytabs" @@ -28,6 +35,7 @@ tasks.register("copyConfToGeneratedResources", Copy) { String realm = "BUILD.ELASTIC.CO" tasks.named("javaRestTest").configure { + dependsOn "copyKeytabToGeneratedResources", "copyConfToGeneratedResources" usesDefaultDistribution()
Path peppaKeytab = Paths.get("${project.buildDir}", "generated-resources", "keytabs", "peppa.keytab") Path krb5Conf = Paths.get("${project.buildDir}", "generated-resources", "conf", "krb5.conf") @@ -35,12 +43,9 @@ tasks.named("javaRestTest").configure { nonInputProperties.systemProperty 'test.userkt.keytab', "${peppaKeytab}" nonInputProperties.systemProperty 'test.userpwd', "george@${realm}" nonInputProperties.systemProperty 'test.krb5.conf', "${krb5Conf}" + nonInputProperties.systemProperty 'java.security.krb5.conf', "${krb5Conf}" systemProperty 'test.userpwd.password', "dino_but_longer_than_14_chars" - jvmArgs([ - "-Djava.security.krb5.conf=${krb5Conf}", - "-Dsun.security.krb5.debug=true" - ]) - dependsOn "copyKeytabToGeneratedResources", "copyConfToGeneratedResources" + systemProperty 'sun.security.krb5.debug', true classpath += files("$buildDir/generated-resources/keytabs") classpath += files("$buildDir/generated-resources/conf") } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle index f9d60181fc815..cce18a4bd1579 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-resources' @@ -40,11 +41,11 @@ def queryingCluster = testClusters.register('querying-cluster') { setting 'cluster.remote.my_remote_cluster.mode', 'proxy' setting 'cluster.remote.my_remote_cluster.proxy_address', { "\"${fulfillingCluster.get().getAllTransportPortURI().get(0)}\"" - } + }, IGNORE_VALUE } else { setting 'cluster.remote.my_remote_cluster.seeds', { fulfillingCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE } } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle index 35b7a981fca36..69c0e8b20c2c4 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-resources' @@ -40,11 +41,11 @@ def queryingCluster = testClusters.register('querying-cluster') { setting 'cluster.remote.my_remote_cluster.mode', 'proxy' setting 'cluster.remote.my_remote_cluster.proxy_address', { "\"${fulfillingCluster.get().getAllTransportPortURI().get(0)}\"" - } + }, IGNORE_VALUE } else { setting 'cluster.remote.my_remote_cluster.seeds', { fulfillingCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE } } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle index 810895abe1e86..1164aa240ee22 100644 --- 
a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle @@ -1,5 +1,6 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.standalone-rest-test' apply plugin: 'elasticsearch.rest-resources' @@ -63,11 +64,11 @@ def queryingCluster = testClusters.register('querying-cluster') { setting 'cluster.remote.my_remote_cluster.mode', 'proxy' setting 'cluster.remote.my_remote_cluster.proxy_address', { "\"${fulfillingCluster.get().getAllTransportPortURI().get(0)}\"" - } + }, IGNORE_VALUE } else { setting 'cluster.remote.my_remote_cluster.seeds', { fulfillingCluster.get().getAllTransportPortURI().collect { "\"$it\"" }.toString() - } + }, IGNORE_VALUE } } diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index da8802e452c43..3c55aa8aa4663 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -13,6 +13,7 @@ import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.transform.UnzipTransform +import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.jdk-download' apply plugin: 'elasticsearch.internal-testclusters' @@ -86,7 +87,7 @@ if (OS.current() == OS.WINDOWS) { numberOfNodes = 2 versions = [project.version, project.version] // to test full cluster restart - setting 'path.repo', repoLocation + setting 'path.repo', repoLocation, IGNORE_VALUE setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' @@ -151,7 +152,7 @@ if (OS.current() == OS.WINDOWS) { } tasks.matching { it.name.startsWith("javaRestTest") && it.name.endsWith(versionNoDots) }.configureEach { - it.systemProperty "tests.repo.location", repoLocation + it.nonInputProperties.systemProperty "tests.repo.location", repoLocation it.systemProperty "tests.es.version", version.toString() /* Use a closure on the string to delay evaluation until right before we diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index 368b8033e0b8d..865ba0c07cfeb 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -30,10 +30,16 @@ public abstract class AbstractUpgradeTestCase extends ESRestTestCase { new SecureString(SecuritySettingsSourceField.TEST_PASSWORD) ); - protected static final Version UPGRADE_FROM_VERSION = Version.fromString(System.getProperty("tests.upgrade_from_version")); + protected static final String UPGRADE_FROM_VERSION = System.getProperty("tests.upgrade_from_version"); protected static final boolean SKIP_ML_TESTS = Booleans.parseBoolean(System.getProperty("tests.ml.skip", "false")); + // TODO: replace with feature testing + @Deprecated + protected static boolean isOriginalClusterVersionAtLeast(Version supportedVersion) { + return Version.fromString(UPGRADE_FROM_VERSION).onOrAfter(supportedVersion); + } + @Override protected boolean resetFeatureStates() { return false;
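With UPGRADE_FROM_VERSION now a plain string, the deprecated helper above becomes the one remaining place that parses it into a Version. Call sites elsewhere in this patch then gate tests through the helper rather than comparing versions inline; this shape is taken directly from the DataStreamsUpgradeIT hunk further down:

    assumeTrue("no data streams in versions before " + Version.V_7_9_0, isOriginalClusterVersionAtLeast(Version.V_7_9_0));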
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java index f9b72a0d89024..850a94f7133e9 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ApiKeyBackwardsCompatibilityIT.java @@ -54,7 +54,7 @@ public class ApiKeyBackwardsCompatibilityIT extends AbstractUpgradeTestCase { public void testCreatingAndUpdatingApiKeys() throws Exception { assumeTrue( "The remote_indices for API Keys are not supported before version " + API_KEY_SUPPORT_REMOTE_INDICES_VERSION, - UPGRADE_FROM_VERSION.before(API_KEY_SUPPORT_REMOTE_INDICES_VERSION) + isOriginalClusterVersionAtLeast(API_KEY_SUPPORT_REMOTE_INDICES_VERSION) == false ); switch (CLUSTER_TYPE) { case OLD -> { @@ -183,7 +183,7 @@ private Tuple<String, String> createOrGrantApiKey(RestClient client, String role "role_descriptors": %s }""", name, roles); // Grant API did not exist before 7.7.0 - final boolean grantApiKey = randomBoolean() && UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_7_0); + final boolean grantApiKey = randomBoolean() && isOriginalClusterVersionAtLeast(Version.V_7_7_0); if (grantApiKey) { createApiKeyRequest = new Request("POST", "/_security/api_key/grant"); createApiKeyRequest.setJsonEntity(org.elasticsearch.common.Strings.format(""" @@ -220,16 +220,16 @@ private void updateOrBulkUpdateApiKey(String id, String roles) throws IOExceptio private boolean isUpdateApiSupported(RestClient client) { return switch (CLUSTER_TYPE) { - case OLD -> UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_4_0); // Update API was introduced in 8.4.0. - case MIXED -> UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_4_0) || client == newVersionClient; + case OLD -> isOriginalClusterVersionAtLeast(Version.V_8_4_0); // Update API was introduced in 8.4.0. + case MIXED -> isOriginalClusterVersionAtLeast(Version.V_8_4_0) || client == newVersionClient; case UPGRADED -> true; }; } private boolean isBulkUpdateApiSupported(RestClient client) { return switch (CLUSTER_TYPE) { - case OLD -> UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_5_0); // Bulk update API was introduced in 8.5.0. - case MIXED -> UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_5_0) || client == newVersionClient; + case OLD -> isOriginalClusterVersionAtLeast(Version.V_8_5_0); // Bulk update API was introduced in 8.5.0.
+ case MIXED -> isOriginalClusterVersionAtLeast(Version.V_8_5_0) || client == newVersionClient; case UPGRADED -> true; }; } @@ -303,11 +303,18 @@ private static String randomRoleDescriptors(boolean includeRemoteIndices) { } } + boolean nodeSupportApiKeyRemoteIndices(Map<String, Object> nodeDetails) { + // TODO[lor]: the method can be kept, but we need to replace version check with features checks + String versionString = (String) nodeDetails.get("version"); + Version version = Version.fromString(versionString.replace("-SNAPSHOT", "")); + return version.onOrAfter(API_KEY_SUPPORT_REMOTE_INDICES_VERSION); + } + private void createClientsByVersion() throws IOException { - Map<Version, RestClient> clientsByVersion = getRestClientByVersion(); - if (clientsByVersion.size() == 2) { - for (Map.Entry<Version, RestClient> client : clientsByVersion.entrySet()) { - if (client.getKey().before(API_KEY_SUPPORT_REMOTE_INDICES_VERSION)) { + var clientsByCapability = getRestClientByCapability(); + if (clientsByCapability.size() == 2) { + for (Map.Entry<Boolean, RestClient> client : clientsByCapability.entrySet()) { + if (client.getKey() == false) { oldVersionClient = client.getValue(); } else { newVersionClient = client.getValue(); @@ -316,7 +323,7 @@ private void createClientsByVersion() throws IOException { assertThat(oldVersionClient, notNullValue()); assertThat(newVersionClient, notNullValue()); } else { - fail("expected 2 versions during rolling upgrade but got: " + clientsByVersion.size()); + fail("expected 2 versions during rolling upgrade but got: " + clientsByCapability.size()); } } @@ -332,23 +339,24 @@ private void closeClientsByVersion() throws IOException { } @SuppressWarnings("unchecked") - private Map<Version, RestClient> getRestClientByVersion() throws IOException { + private Map<Boolean, RestClient> getRestClientByCapability() throws IOException { Response response = client().performRequest(new Request("GET", "_nodes")); assertOK(response); ObjectPath objectPath = ObjectPath.createFromResponse(response); Map<String, Object> nodesAsMap = objectPath.evaluate("nodes"); - Map<Version, List<HttpHost>> hostsByVersion = new HashMap<>(); + Map<Boolean, List<HttpHost>> hostsByCapability = new HashMap<>(); for (Map.Entry<String, Object> entry : nodesAsMap.entrySet()) { Map<String, Object> nodeDetails = (Map<String, Object>) entry.getValue(); - Version version = Version.fromString((String) nodeDetails.get("version")); + var capabilitySupported = nodeSupportApiKeyRemoteIndices(nodeDetails); Map<String, Object> httpInfo = (Map<String, Object>) nodeDetails.get("http"); - hostsByVersion.computeIfAbsent(version, k -> new ArrayList<>()).add(HttpHost.create((String) httpInfo.get("publish_address"))); + hostsByCapability.computeIfAbsent(capabilitySupported, k -> new ArrayList<>()) + .add(HttpHost.create((String) httpInfo.get("publish_address"))); } - Map<Version, RestClient> clientsByVersion = new HashMap<>(); - for (Map.Entry<Version, List<HttpHost>> entry : hostsByVersion.entrySet()) { - clientsByVersion.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0]))); + Map<Boolean, RestClient> clientsByCapability = new HashMap<>(); + for (var entry : hostsByCapability.entrySet()) { + clientsByCapability.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0]))); } - return clientsByVersion; + return clientsByCapability; } private static RoleDescriptor randomRoleDescriptor(boolean includeRemoteIndices) { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 4c399d0a01a93..cf2a66bc4fb5b 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ 
b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -25,7 +25,7 @@ public class DataStreamsUpgradeIT extends AbstractUpgradeTestCase { public void testDataStreams() throws IOException { - assumeTrue("no data streams in versions before " + Version.V_7_9_0, UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_9_0)); + assumeTrue("no data streams in versions before " + Version.V_7_9_0, isOriginalClusterVersionAtLeast(Version.V_7_9_0)); if (CLUSTER_TYPE == ClusterType.OLD) { String requestBody = """ { @@ -110,7 +110,7 @@ public void testDataStreams() throws IOException { } public void testDataStreamValidationDoesNotBreakUpgrade() throws Exception { - assumeTrue("Bug started to occur from version: " + Version.V_7_10_2, UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_10_2)); + assumeTrue("Bug started to occur from version: " + Version.V_7_10_2, isOriginalClusterVersionAtLeast(Version.V_7_10_2)); if (CLUSTER_TYPE == ClusterType.OLD) { String requestBody = """ { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java index 87d605d29fa86..e1845e901447e 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MLModelDeploymentsUpgradeIT.java @@ -73,7 +73,7 @@ public void setUpLogging() throws IOException { { "persistent": { "logger.org.elasticsearch.xpack.ml.inference": "TRACE", - "logger.org.elasticsearch.xpack.ml.inference.assignments": "TRACE", + "logger.org.elasticsearch.xpack.ml.inference.assignments": "DEBUG", "logger.org.elasticsearch.xpack.ml.process": "DEBUG", "logger.org.elasticsearch.xpack.ml.action": "TRACE" } @@ -97,9 +97,8 @@ public void removeLogging() throws IOException { client().performRequest(request); } - @AwaitsFix(bugUrl = "mute to try and reproduce https://github.com/elastic/elasticsearch/issues/100379") public void testTrainedModelDeployment() throws Exception { - assumeTrue("NLP model deployments added in 8.0", UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_0_0)); + assumeTrue("NLP model deployments added in 8.0", isOriginalClusterVersionAtLeast(Version.V_8_0_0)); final String modelId = "upgrade-deployment-test"; @@ -135,7 +134,7 @@ public void testTrainedModelDeployment() throws Exception { } public void testTrainedModelDeploymentStopOnMixedCluster() throws Exception { - assumeTrue("NLP model deployments added in 8.0", UPGRADE_FROM_VERSION.onOrAfter(Version.V_8_0_0)); + assumeTrue("NLP model deployments added in 8.0", isOriginalClusterVersionAtLeast(Version.V_8_0_0)); final String modelId = "upgrade-deployment-test-stop-mixed-cluster"; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index 8111e9e68df8a..9913c40dac411 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -76,7 +76,10 @@ public void testSnapshotUpgrader() throws Exception { switch (CLUSTER_TYPE) { case OLD -> createJobAndSnapshots(); case MIXED -> { - assumeTrue("We should only test if old cluster is before new cluster", UPGRADE_FROM_VERSION.before(Version.CURRENT)); + assumeTrue( + "We 
should only test if old cluster is before new cluster", + isOriginalClusterVersionAtLeast(Version.CURRENT) == false + ); ensureHealth((request -> { request.addParameter("timeout", "70s"); request.addParameter("wait_for_nodes", "3"); @@ -85,7+88,10 @@ public void testSnapshotUpgrader() throws Exception { testSnapshotUpgradeFailsOnMixedCluster(); } case UPGRADED -> { - assumeTrue("We should only test if old cluster is before new cluster", UPGRADE_FROM_VERSION.before(Version.CURRENT)); + assumeTrue( + "We should only test if old cluster is before new cluster", + isOriginalClusterVersionAtLeast(Version.CURRENT) == false + ); ensureHealth((request -> { request.addParameter("timeout", "70s"); request.addParameter("wait_for_nodes", "3"); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java index 23ea59811a24d..5427fb0c6cc88 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlTrainedModelsUpgradeIT.java @@ -57,7 +57,7 @@ protected Collection<String> templatesToWaitFor() { } public void testTrainedModelInference() throws Exception { - assumeTrue("We should only test if old cluster is after trained models we GA", UPGRADE_FROM_VERSION.after(Version.V_7_13_0)); + assumeTrue("We should only test if old cluster is after trained models went GA", isOriginalClusterVersionAtLeast(Version.V_7_13_1)); switch (CLUSTER_TYPE) { case OLD -> { createIndexWithName(INDEX_NAME); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java index 9225fbfaa6642..3cdeb6dab4d91 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SearchableSnapshotsRollingUpgradeIT.java @@ -163,29 +163,28 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs final int numberOfNodes = 3; waitForNodes(numberOfNodes); - final Map<String, Version> nodesIdsAndVersions = nodesVersions(); + final Map<String, String> nodesIdsAndVersions = nodesVersions(); assertThat("Cluster should have 3 nodes", nodesIdsAndVersions.size(), equalTo(numberOfNodes)); - final Version minVersion = nodesIdsAndVersions.values().stream().min(Version::compareTo).get(); - final Version maxVersion = nodesIdsAndVersions.values().stream().max(Version::compareTo).get(); - - final String nodeIdWithMinVersion = randomFrom( - nodesIdsAndVersions.entrySet() - .stream() - .filter(node -> minVersion.equals(node.getValue())) - .map(Map.Entry::getKey) - .collect(Collectors.toSet()) - ); - - final String nodeIdWithMaxVersion = randomValueOtherThan( - nodeIdWithMinVersion, - () -> randomFrom( - nodesIdsAndVersions.entrySet() - .stream() - .filter(node -> maxVersion.equals(node.getValue())) - .map(Map.Entry::getKey) - .collect(Collectors.toSet()) - ) + final var newVersionNodes = nodesIdsAndVersions.entrySet() + .stream() + .filter(node -> UPGRADE_FROM_VERSION.equals(node.getValue()) == false) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + + final var originalVersionNodes = nodesIdsAndVersions.entrySet() + .stream() + .filter(node -> UPGRADE_FROM_VERSION.equals(node.getValue())) +
.map(Map.Entry::getKey) + .collect(Collectors.toSet()); + + final String nodeIdWithOriginalVersion = randomFrom(originalVersionNodes); + + // We may not have upgraded nodes, if we are running these test on the same version (original == current) + final var upgradedVersionNodes = newVersionNodes.isEmpty() ? originalVersionNodes : newVersionNodes; + final String nodeIdWithUpgradedVersion = randomValueOtherThan( + nodeIdWithOriginalVersion, + () -> randomFrom(upgradedVersionNodes) ); // The snapshot is mounted on the node with the min. version in order to force the node to populate the blob store cache index. @@ -197,8 +196,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs "mounting snapshot as index [{}] with storage [{}] on node [{}] with min. version [{}]", index, storage, - nodeIdWithMinVersion, - minVersion + nodeIdWithOriginalVersion, + UPGRADE_FROM_VERSION ); mountSnapshot( repository, @@ -208,7 +207,7 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs storage, Settings.builder() // we want a specific node version to create docs in the blob cache index - .put("index.routing.allocation.include._id", nodeIdWithMinVersion) + .put("index.routing.allocation.include._id", nodeIdWithOriginalVersion) // prevent interferences with blob cache when full_copy is used .put("index.store.snapshot.cache.prewarm.enabled", false) .build() @@ -222,8 +221,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs "mounting the same snapshot of index [{}] with storage [{}], this time on node [{}] with higher version [{}]", index, storage, - nodeIdWithMaxVersion, - maxVersion + nodeIdWithUpgradedVersion, + nodesIdsAndVersions.get(nodeIdWithUpgradedVersion) ); mountSnapshot( repository, @@ -233,8 +232,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs storage, Settings.builder() // we want a specific node version to use the cached blobs created by the nodeIdWithMinVersion - .put("index.routing.allocation.include._id", nodeIdWithMaxVersion) - .put("index.routing.allocation.exclude._id", nodeIdWithMinVersion) + .put("index.routing.allocation.include._id", nodeIdWithUpgradedVersion) + .put("index.routing.allocation.exclude._id", nodeIdWithOriginalVersion) // prevent interferences with blob cache when full_copy is used .put("index.store.snapshot.cache.prewarm.enabled", false) .build() @@ -251,8 +250,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs "mounting snapshot as index [{}] with storage [{}] on node [{}] with max. 
version [{}]", index, storage, - nodeIdWithMaxVersion, - maxVersion + nodeIdWithUpgradedVersion, + nodesIdsAndVersions.get(nodeIdWithUpgradedVersion) ); mountSnapshot( repository, @@ -262,7 +261,7 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs storage, Settings.builder() // we want a specific node version to create docs in the blob cache index - .put("index.routing.allocation.include._id", nodeIdWithMaxVersion) + .put("index.routing.allocation.include._id", nodeIdWithUpgradedVersion) // prevent interferences with blob cache when full_copy is used .put("index.store.snapshot.cache.prewarm.enabled", false) .build() @@ -276,8 +275,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs "mounting the same snapshot of index [{}] with storage [{}], this time on node [{}] with lower version [{}]", index, storage, - nodeIdWithMinVersion, - minVersion + nodeIdWithOriginalVersion, + UPGRADE_FROM_VERSION ); mountSnapshot( repository, @@ -287,8 +286,8 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs storage, Settings.builder() // we want a specific node version to use the cached blobs created by the nodeIdWithMinVersion - .put("index.routing.allocation.include._id", nodeIdWithMinVersion) - .put("index.routing.allocation.exclude._id", nodeIdWithMaxVersion) + .put("index.routing.allocation.include._id", nodeIdWithOriginalVersion) + .put("index.routing.allocation.exclude._id", nodeIdWithUpgradedVersion) // prevent interferences with blob cache when full_copy is used .put("index.store.snapshot.cache.prewarm.enabled", false) .build() @@ -297,7 +296,7 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs assertHitCount(index, equalTo(numberOfDocs * 2L)); deleteIndex(index); - if (UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_13_0)) { + if (isOriginalClusterVersionAtLeast(Version.V_7_13_0)) { final Request request = new Request( "GET", "/.snapshot-blob-cache/_settings/index.routing.allocation.include._tier_preference" @@ -330,7 +329,7 @@ private void executeBlobCacheCreationTestCase(Storage storage, long numberOfDocs private static void assumeVersion(Version minSupportedVersion, Storage storageType) { assumeTrue( "Searchable snapshots with storage type [" + storageType + "] is supported since version [" + minSupportedVersion + ']', - UPGRADE_FROM_VERSION.onOrAfter(minSupportedVersion) + isOriginalClusterVersionAtLeast(minSupportedVersion) ); } @@ -364,14 +363,14 @@ private static void waitForNodes(int numberOfNodes) throws IOException { } @SuppressWarnings("unchecked") - private static Map nodesVersions() throws IOException { + private static Map nodesVersions() throws IOException { final Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, "_nodes/_all")); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); final Map nodes = (Map) extractValue(responseAsMap(response), "nodes"); assertNotNull("Nodes info is null", nodes); - final Map nodesVersions = Maps.newMapWithExpectedSize(nodes.size()); + final Map nodesVersions = Maps.newMapWithExpectedSize(nodes.size()); for (Map.Entry node : nodes.entrySet()) { - nodesVersions.put(node.getKey(), Version.fromString((String) extractValue((Map) node.getValue(), "version"))); + nodesVersions.put(node.getKey(), (String) extractValue((Map) node.getValue(), "version")); } return nodesVersions; } @@ -391,7 +390,7 @@ private static void mountSnapshot( Settings indexSettings ) throws 
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java
index d82d6d5dd6747..8b2fe0d1e2af1 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java
@@ -8,7 +8,6 @@
 
 import org.apache.http.HttpHeaders;
 import org.apache.http.HttpHost;
-import org.elasticsearch.Version;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
@@ -43,7 +42,7 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase {
 
     @Before
     private void collectClientsByVersion() throws IOException {
-        Map<Version, RestClient> clientsByVersion = getRestClientByVersion();
+        Map<String, RestClient> clientsByVersion = getRestClientByVersion();
         if (clientsByVersion.size() == 2) {
             // usual case, clients have different versions
             twoClients = clientsByVersion.values();
@@ -316,20 +315,20 @@ private void assertRefreshTokenInvalidated(String refreshToken) throws IOExcepti
     }
 
     @SuppressWarnings("unchecked")
-    private Map<Version, RestClient> getRestClientByVersion() throws IOException {
+    private Map<String, RestClient> getRestClientByVersion() throws IOException {
         Response response = client().performRequest(new Request("GET", "_nodes"));
         assertOK(response);
         ObjectPath objectPath = ObjectPath.createFromResponse(response);
         Map<String, Object> nodesAsMap = objectPath.evaluate("nodes");
-        Map<Version, List<HttpHost>> hostsByVersion = new HashMap<>();
+        Map<String, List<HttpHost>> hostsByVersion = new HashMap<>();
         for (Map.Entry<String, Object> entry : nodesAsMap.entrySet()) {
             Map<String, Object> nodeDetails = (Map<String, Object>) entry.getValue();
-            Version version = Version.fromString((String) nodeDetails.get("version"));
+            String version = (String) nodeDetails.get("version");
             Map<String, Object> httpInfo = (Map<String, Object>) nodeDetails.get("http");
             hostsByVersion.computeIfAbsent(version, k -> new ArrayList<>()).add(HttpHost.create((String) httpInfo.get("publish_address")));
         }
-        Map<Version, RestClient> clientsByVersion = new HashMap<>();
-        for (Map.Entry<Version, List<HttpHost>> entry : hostsByVersion.entrySet()) {
+        Map<String, RestClient> clientsByVersion = new HashMap<>();
+        for (Map.Entry<String, List<HttpHost>> entry : hostsByVersion.entrySet()) {
             clientsByVersion.put(entry.getKey(), buildClient(restClientSettings(), entry.getValue().toArray(new HttpHost[0])));
         }
         return clientsByVersion;
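
The rewritten getRestClientByVersion() only ever uses its keys for equality and grouping, never for ordering, so raw version strings are a drop-in replacement for Version keys. A small illustration, with made-up hosts and version strings (requires java.util collections and org.apache.http.HttpHost):

    // Illustration only: two distinct version strings yield two entries (the
    // "usual case, clients have different versions" branch); a same-version
    // upgrade collapses to a single entry, which the size() check also handles.
    static void groupByVersionStringExample() {
        Map<String, List<HttpHost>> hostsByVersion = new HashMap<>();
        hostsByVersion.computeIfAbsent("8.10.4", k -> new ArrayList<>()).add(HttpHost.create("http://127.0.0.1:9200"));
        hostsByVersion.computeIfAbsent("8.10.4", k -> new ArrayList<>()).add(HttpHost.create("http://127.0.0.1:9201"));
        hostsByVersion.computeIfAbsent("8.11.0", k -> new ArrayList<>()).add(HttpHost.create("http://127.0.0.1:9202"));
        assert hostsByVersion.size() == 2; // mixed cluster: one REST client per version string
    }
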
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java
index 3f4eb491d6cf9..c24665d812db6 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java
@@ -9,7 +9,7 @@
 import org.apache.http.HttpHost;
 import org.apache.http.entity.ContentType;
 import org.apache.http.entity.StringEntity;
-import org.elasticsearch.Version;
+import org.elasticsearch.Build;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
@@ -235,7 +235,7 @@ private void verifyContinuousTransformHandlesData(long expectedLastCheckpoint) t
 
     private void verifyUpgradeFailsIfMixedCluster() {
         // upgrade tests by design are also executed with the same version, this check must be skipped in this case, see gh#39102.
-        if (UPGRADE_FROM_VERSION.equals(Version.CURRENT)) {
+        if (UPGRADE_FROM_VERSION.equals(Build.current().version())) {
             return;
         }
         final Request upgradeTransformRequest = new Request("POST", getTransformEndpoint() + "_upgrade");
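
In TransformSurvivesUpgradeIT the same-version guard becomes plain string equality against Build.current().version() instead of a Version comparison. The next file additionally rewrites every UPGRADE_FROM_VERSION.before(X) as isOriginalClusterVersionAtLeast(X) == false, which is safe because before(v) is the exact negation of onOrAfter(v). A hypothetical spot-check of that equivalence (versions below are made up; requires org.elasticsearch.Version):

    // Hypothetical spot-check: before(v) and onOrAfter(v) partition the version
    // line, so the negated helper preserves every branch in the switch below.
    static void negationEquivalenceCheck() {
        Version original = Version.fromString("8.7.0");  // stands in for the original cluster version
        Version cutover = Version.fromString("8.8.0");   // stands in for VERSION_INTRODUCING_TRANSPORT_VERSIONS
        assert original.before(cutover) == (original.onOrAfter(cutover) == false);
    }
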
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java
index 470525a69ea0b..3c073605969af 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java
@@ -19,8 +19,6 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.everyItem;
 import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.hamcrest.Matchers.lessThan;
 import static org.hamcrest.Matchers.oneOf;
 
 public class TransportVersionClusterStateUpgradeIT extends AbstractUpgradeTestCase {
@@ -66,11 +64,11 @@ private boolean runTransportVersionsTest() throws Exception {
 
         switch (CLUSTER_TYPE) {
             case OLD -> {
-                if (UPGRADE_FROM_VERSION.before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) {
+                if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS) == false) {
                     // Before 8.8.0 there was only DiscoveryNode#version
                     assertFalse(description, hasTransportVersions);
                     assertFalse(description, hasNodesVersions);
-                } else if (UPGRADE_FROM_VERSION.before(VERSION_INTRODUCING_NODES_VERSIONS)) {
+                } else if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false) {
                     // In [8.8.0, 8.11.0) we exposed just transport_versions
                     assertTrue(description, hasTransportVersions);
                     assertFalse(description, hasNodesVersions);
@@ -81,10 +79,10 @@ private boolean runTransportVersionsTest() throws Exception {
                 }
             }
             case MIXED -> {
-                if (UPGRADE_FROM_VERSION.before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) {
+                if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS) == false) {
                     // Responding node might be <8.8.0 (so no extra versions) or >=8.11.0 (includes nodes_versions)
                     assertFalse(description, hasTransportVersions);
-                } else if (UPGRADE_FROM_VERSION.before(VERSION_INTRODUCING_NODES_VERSIONS)) {
+                } else if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false) {
                     // Responding node might be in [8.8.0, 8.11.0) (transport_versions) or >=8.11.0 (includes nodes_versions) but not both
                     assertTrue(description, hasNodesVersions || hasTransportVersions);
                 } else {
@@ -103,8 +101,8 @@ private boolean runTransportVersionsTest() throws Exception {
 
         if (hasTransportVersions) {
             // Upgrading from [8.8.0, 8.11.0) and the responding node is still on the old version
-            assertThat(description, UPGRADE_FROM_VERSION, lessThan(VERSION_INTRODUCING_NODES_VERSIONS));
-            assertThat(description, UPGRADE_FROM_VERSION, greaterThanOrEqualTo(VERSION_INTRODUCING_TRANSPORT_VERSIONS));
+            assertFalse(description, isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS));
+            assertTrue(description, isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS));
             assertNotEquals(description, ClusterType.UPGRADED, CLUSTER_TYPE);
 
             // transport_versions includes the correct version for all nodes, no inference is needed
@@ -126,7 +124,10 @@ private boolean runTransportVersionsTest() throws Exception {
             }
         } else if (hasNodesVersions) {
             // Either upgrading from ≥8.11.0 (the responding node might be old or new), or from <8.8.0 (the responding node is new)
-            assertFalse(description, UPGRADE_FROM_VERSION.before(VERSION_INTRODUCING_NODES_VERSIONS) && CLUSTER_TYPE == ClusterType.OLD);
+            assertFalse(
+                description,
+                isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false && CLUSTER_TYPE == ClusterType.OLD
+            );
 
             // nodes_versions includes _a_ version for all nodes; it might be correct, or it might be inferred if we're upgrading from
             // <8.8.0 and the master is still an old node or the TransportVersionsFixupListener hasn't run yet
@@ -144,7 +145,7 @@ private boolean runTransportVersionsTest() throws Exception {
                 assertThat(
                     nodeDescription,
                     transportVersion,
-                    UPGRADE_FROM_VERSION.onOrAfter(VERSION_INTRODUCING_TRANSPORT_VERSIONS)
+                    isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS)
                         ? equalTo(TransportVersion.current())
                         : oneOf(TransportVersion.current(), FIRST_TRANSPORT_VERSION)
                 );
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
index 44eca0dc9a997..59acb7722085f 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java
@@ -43,7 +43,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa
     public void waitForTemplates() throws Exception {
         if (AbstractUpgradeTestCase.CLUSTER_TYPE == AbstractUpgradeTestCase.ClusterType.OLD) {
             try {
-                boolean clusterUnderstandsComposableTemplates = AbstractUpgradeTestCase.UPGRADE_FROM_VERSION.onOrAfter(Version.V_7_8_0);
+                boolean clusterUnderstandsComposableTemplates = AbstractUpgradeTestCase.isOriginalClusterVersionAtLeast(Version.V_7_8_0);
                 XPackRestTestHelper.waitForTemplates(
                     client(),
                     XPackRestTestConstants.ML_POST_V7120_TEMPLATES,
diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle
index c2dffe2470b84..6027a421b62f2 100644
--- a/x-pack/qa/saml-idp-tests/build.gradle
+++ b/x-pack/qa/saml-idp-tests/build.gradle
@@ -31,6 +31,12 @@ tasks.register("copyIdpFiles", Sync) {
   }
 }
 
+normalization {
+  runtimeClasspath {
+    ignore 'idp-metadata.xml'
+  }
+}
+
 tasks.named("javaRestTest").configure {
   usesDefaultDistribution()
   classpath += files(tasks.named("copyIdpFiles"))