From 2a1c4b1ff45a871ca5b4e6504b00f93a364585b6 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 6 Aug 2020 17:13:48 -0400 Subject: [PATCH 1/3] WIP --- .../gradle/JdkDownloadPluginFuncTest.groovy | 237 +++++++++ .../fixtures/AbstractGradleFuncTest.groovy | 2 +- .../gradle/fixtures/WiremockFixture.groovy | 70 +++ .../gradle/AdoptOpenJdkDownloadPluginIT.java | 50 -- .../gradle/JdkDownloadPluginIT.java | 119 ----- .../gradle/OpenJdkDownloadPluginIT.java | 68 --- .../gradle/JdkDownloadPlugin.java | 312 ++++-------- .../tar/SymbolicLinkPreservingUntarTask.java | 170 ------- .../SymbolicLinkPreservingUntarTransform.java | 110 +++++ .../gradle/transform/UnpackTransform.java | 85 ++++ .../gradle/transform/UnzipTransform.java | 54 +++ .../gradle/util/GradleUtils.java | 25 - .../src/testKit/jdk-download/build.gradle | 16 - .../testKit/jdk-download/reuse/build.gradle | 12 - .../src/testKit/jdk-download/settings.gradle | 1 - .../testKit/jdk-download/subproj/build.gradle | 48 -- .../elasticsearch/client/IndicesClientIT.java | 3 + distribution/docker/src/docker/Dockerfile | 4 +- .../high-level/getting-started.asciidoc | 23 +- docs/painless/painless-contexts.asciidoc | 2 +- .../painless-field-context.asciidoc | 2 +- .../painless-guide/painless-datetime.asciidoc | 2 +- docs/plugins/mapper-size.asciidoc | 4 +- .../bucket/composite-aggregation.asciidoc | 3 + .../metrics/tophits-aggregation.asciidoc | 4 +- docs/reference/cat.asciidoc | 1 + .../change-mappings-and-settings.asciidoc | 174 +++---- .../data-streams/data-streams.asciidoc | 4 +- .../set-up-a-data-stream.asciidoc | 83 ++-- .../data-streams/use-a-data-stream.asciidoc | 159 +++---- docs/reference/how-to/search-speed.asciidoc | 2 +- .../images/data-streams/data-streams-list.png | Bin 16528 -> 55222 bytes .../reference/indices/rollover-index.asciidoc | 2 +- docs/reference/mapping/types/nested.asciidoc | 2 +- .../ml-configuring-transform.asciidoc | 2 +- docs/reference/ml/ml-shared.asciidoc | 2 +- .../modules/cross-cluster-search.asciidoc | 2 +- docs/reference/redirects.asciidoc | 24 +- docs/reference/scripting/fields.asciidoc | 4 +- docs/reference/scripting/security.asciidoc | 13 +- docs/reference/scripting/using.asciidoc | 2 +- docs/reference/search/request-body.asciidoc | 15 +- .../search/request/collapse.asciidoc | 2 +- .../search/request/inner-hits.asciidoc | 22 +- .../search/request/script-fields.asciidoc | 9 +- .../search/request/stored-fields.asciidoc | 11 +- docs/reference/search/search-fields.asciidoc | 21 +- .../search/search-your-data.asciidoc | 13 +- docs/reference/search/search.asciidoc | 4 +- docs/reference/setup/install/docker.asciidoc | 16 +- gradle/fips.gradle | 27 +- .../stats/MatrixStatsAggregatorTests.java | 47 +- .../ir/{BraceNode.java => AccessNode.java} | 4 +- .../elasticsearch/painless/ir/CallNode.java | 43 -- .../elasticsearch/painless/ir/DotNode.java | 63 --- .../elasticsearch/painless/node/ECall.java | 8 - .../phase/DefaultUserTreeToIRTreePhase.java | 112 +++-- .../painless/phase/IRTreeBaseVisitor.java | 16 +- .../painless/phase/IRTreeVisitor.java | 8 +- .../phase/PainlessUserTreeToIRTreePhase.java | 449 +++++++++--------- .../ChildrenToParentAggregatorTests.java | 6 +- .../ParentToChildrenAggregatorTests.java | 5 +- qa/os/build.gradle | 4 +- .../packaging/test/DebMetadataTests.java | 1 - .../allocation/decider/MockDiskUsagesIT.java | 4 + .../index/mapper/DateFieldMapper.java | 20 + .../index/mapper/GeoPointFieldMapper.java | 17 + .../index/mapper/MappedFieldType.java | 6 + .../query/DistanceFeatureQueryBuilder.java | 33 +- 
.../elasticsearch/search/SearchModule.java | 12 +- .../CompositeAggregationBuilder.java | 9 + .../bucket/composite/CompositeAggregator.java | 89 +--- .../CompositeValuesSourceBuilder.java | 39 +- .../CompositeValuesSourceConfig.java | 41 +- .../CompositeValuesSourceParserHelper.java | 11 +- .../DateHistogramValuesSourceBuilder.java | 99 +++- .../GeoTileGridValuesSourceBuilder.java | 99 +++- .../HistogramValuesSourceBuilder.java | 88 +++- .../composite/TermsValuesSourceBuilder.java | 163 ++++++- .../InternalVariableWidthHistogram.java | 4 +- .../GlobalOrdinalsStringTermsAggregator.java | 13 +- .../support/ValuesSourceRegistry.java | 123 ++++- .../composite/CompositeAggregatorTests.java | 265 ++++++++++- .../bucket/filter/FilterAggregatorTests.java | 24 +- .../bucket/filter/FiltersAggregatorTests.java | 58 +-- .../AutoDateHistogramAggregatorTests.java | 116 ++--- .../DateHistogramAggregatorTests.java | 133 ++---- .../NumericHistogramAggregatorTests.java | 122 +++-- .../RangeHistogramAggregatorTests.java | 90 ++-- ...VariableWidthHistogramAggregatorTests.java | 105 ++-- .../missing/MissingAggregatorTests.java | 18 +- .../bucket/nested/NestedAggregatorTests.java | 23 +- .../nested/ReverseNestedAggregatorTests.java | 8 +- .../bucket/range/IpRangeAggregatorTests.java | 6 +- .../sampler/DiversifiedSamplerTests.java | 4 +- .../terms/BinaryTermsAggregatorTests.java | 41 +- .../terms/KeywordTermsAggregatorTests.java | 39 +- .../terms/NumericTermsAggregatorTests.java | 43 +- .../terms/RareTermsAggregatorTests.java | 120 ++--- .../SignificantTermsAggregatorTests.java | 29 +- .../bucket/terms/TermsAggregatorTests.java | 8 +- .../metrics/ExtendedStatsAggregatorTests.java | 2 +- .../metrics/GeoBoundsAggregatorTests.java | 10 +- .../metrics/GeoCentroidAggregatorTests.java | 10 +- .../HDRPercentileRanksAggregatorTests.java | 4 +- .../ScriptedMetricAggregatorTests.java | 37 +- .../metrics/StatsAggregatorTests.java | 4 +- .../metrics/SumAggregatorTests.java | 2 +- ...TDigestPercentileRanksAggregatorTests.java | 4 +- .../pipeline/DerivativeAggregatorTests.java | 20 - .../pipeline/MovFnAggrgatorTests.java | 4 - .../support/ValuesSourceRegistryTests.java | 7 +- .../CompressibleBytesOutputStreamTests.java | 6 +- .../aggregations/AggregatorTestCase.java | 151 +++--- .../authorization/alias-privileges.asciidoc | 37 +- ...regatedPercentileRanksAggregatorTests.java | 2 +- .../topmetrics/TopMetricsAggregatorTests.java | 2 +- .../pivot/TermsGroupSourceTests.java | 5 - .../dataframe/inference/InferenceRunner.java | 5 +- .../job/RollupIndexerIndexingTests.java | 2 +- .../mapper/ScriptDateMappedFieldType.java | 26 + .../LongScriptFieldDistanceFeatureQuery.java | 208 ++++++++ ...AbstractScriptMappedFieldTypeTestCase.java | 5 + .../ScriptDateMappedFieldTypeTests.java | 53 ++- .../GeoShapeCentroidAggregatorTests.java | 12 +- .../GeoShapeBoundsAggregatorTests.java | 10 +- .../xpack/sql/querydsl/agg/GroupByKey.java | 16 +- .../sql/planner/QueryTranslatorTests.java | 32 +- .../continuous/DataHistogramGroupByIT.java | 2 + .../continuous/TermsGroupByIT.java | 2 + 130 files changed, 2998 insertions(+), 2612 deletions(-) create mode 100644 buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy create mode 100644 buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/WiremockFixture.groovy delete mode 100644 buildSrc/src/integTest/java/org/elasticsearch/gradle/AdoptOpenJdkDownloadPluginIT.java delete mode 100644 buildSrc/src/integTest/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java 
delete mode 100644 buildSrc/src/integTest/java/org/elasticsearch/gradle/OpenJdkDownloadPluginIT.java delete mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/tar/SymbolicLinkPreservingUntarTask.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnpackTransform.java create mode 100644 buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnzipTransform.java delete mode 100644 buildSrc/src/testKit/jdk-download/build.gradle delete mode 100644 buildSrc/src/testKit/jdk-download/reuse/build.gradle delete mode 100644 buildSrc/src/testKit/jdk-download/settings.gradle delete mode 100644 buildSrc/src/testKit/jdk-download/subproj/build.gradle rename modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/{BraceNode.java => AccessNode.java} (95%) delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java create mode 100644 x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy new file mode 100644 index 0000000000000..090feece45006 --- /dev/null +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/JdkDownloadPluginFuncTest.groovy @@ -0,0 +1,237 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.gradle + +import com.github.tomakehurst.wiremock.WireMockServer +import org.elasticsearch.gradle.fixtures.AbstractGradleFuncTest +import org.elasticsearch.gradle.fixtures.WiremockFixture +import org.elasticsearch.gradle.transform.SymbolicLinkPreservingUntarTransform +import org.elasticsearch.gradle.transform.UnzipTransform +import spock.lang.Unroll + +import java.nio.file.Files +import java.nio.file.Path +import java.nio.file.Paths +import java.util.regex.Matcher +import java.util.regex.Pattern + +import static org.elasticsearch.gradle.JdkDownloadPlugin.VENDOR_ADOPTOPENJDK +import static org.elasticsearch.gradle.JdkDownloadPlugin.VENDOR_OPENJDK + +class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { + + private static final String OPENJDK_VERSION_OLD = "1+99" + private static final String ADOPT_JDK_VERSION = "12.0.2+10" + private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" + private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)"); + + @Unroll + def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { + given: + def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform); + def mockedContent = filebytes(jdkVendor, platform) + buildFile.text = """ + plugins { + id 'elasticsearch.jdk-download' + } + + jdks { + myJdk { + vendor = '$jdkVendor' + version = '$jdkVersion' + platform = "$platform" + architecture = "x64" + } + } + + tasks.register("getJdk") { + dependsOn jdks.myJdk + doLast { + println "JDK HOME: " + jdks.myJdk + } + } + """ + + when: + def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server -> + buildFile << repositoryMockSetup(server, jdkVendor, jdkVersion) + gradleRunner("getJdk").build() + } + + then: + assertExtraction(result.output, expectedJavaBin); + + where: + platform | jdkVendor | jdkVersion | expectedJavaBin | suffix + "linux" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" + "linux" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" + "linux" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" + "windows" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" | "" + "windows" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" + "windows" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" + "darwin" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" + "darwin" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" + "darwin" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" + } + + def "transforms are reused across projects"() { + given: + def mockRepoUrl = urlPath(jdkVendor, jdkVersion, platform) + def mockedContent = filebytes(jdkVendor, platform) + 10.times { + settingsFile << """ + include ':sub-$it' + """ + } + buildFile.text = """ + plugins { + id 'elasticsearch.jdk-download' apply false + } + + subprojects { + apply plugin: 'elasticsearch.jdk-download' + + jdks { + myJdk { + vendor = '$jdkVendor' + version = '$jdkVersion' + platform = "$platform" + architecture = "x64" + } + } + tasks.register("getJdk") { + dependsOn jdks.myJdk + doLast { + println "JDK HOME: " + jdks.myJdk + } + } + } + """ + + when: + def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server -> + buildFile << repositoryMockSetup(server, jdkVendor, jdkVersion) + gradleRunner('getJdk', '-i', '-g', testProjectDir.newFolder().toString()).build() + } + + then: + result.tasks.size() == 10 + 
result.output.count("Unpacking linux-12.0.2-x64.tar.gz using SymbolicLinkPreservingUntarTransform.") == 1 + + where: + platform | jdkVendor | jdkVersion | expectedJavaBin + "linux" | VENDOR_ADOPTOPENJDK | ADOPT_JDK_VERSION | "bin/java" + } + + @Unroll + def "transforms of type #transformType are kept across builds"() { + given: + def mockRepoUrl = urlPath(VENDOR_ADOPTOPENJDK, ADOPT_JDK_VERSION, platform) + def mockedContent = filebytes(VENDOR_ADOPTOPENJDK, platform) + buildFile.text = """ + plugins { + id 'elasticsearch.jdk-download' + } + + apply plugin: 'elasticsearch.jdk-download' + + jdks { + myJdk { + vendor = '$VENDOR_ADOPTOPENJDK' + version = '$ADOPT_JDK_VERSION' + platform = "$platform" + architecture = "x64" + } + } + + tasks.register("getJdk") { + dependsOn jdks.myJdk + doLast { + println "JDK HOME: " + jdks.myJdk + } + } + """ + + when: + def result = WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server -> + buildFile << repositoryMockSetup(server, VENDOR_ADOPTOPENJDK, ADOPT_JDK_VERSION) + + def commonGradleUserHome = testProjectDir.newFolder().toString() + // initial run + gradleRunner('getJdk', '-g', commonGradleUserHome).build() + // run against up-to-date transformations + gradleRunner('getJdk', '-i', '-g', commonGradleUserHome).build() + } + + then: + assertOutputContains(result.output, "Skipping $transformType") + + where: + platform | transformType + "linux" | SymbolicLinkPreservingUntarTransform.class.simpleName + "windows" | UnzipTransform.class.simpleName + } + + static boolean assertExtraction(String output, String javaBin) { + Matcher matcher = JDK_HOME_LOGLINE.matcher(output); + assert matcher.find() == true; + String jdkHome = matcher.group(1); + Path javaPath = Paths.get(jdkHome, javaBin); + assert Files.exists(javaPath) == true; + true + } + + private static String urlPath(final String vendor, final String version, final String platform) { + if (vendor.equals(VENDOR_ADOPTOPENJDK)) { + final String module = platform.equals("darwin") ? "mac" : platform; + return "/jdk-12.0.2+10/" + module + "/x64/jdk/hotspot/normal/adoptopenjdk"; + } else if (vendor.equals(VENDOR_OPENJDK)) { + final String effectivePlatform = platform.equals("darwin") ? "osx" : platform; + final boolean isOld = version.equals(OPENJDK_VERSION_OLD); + final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; + final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform); + return "/java/GA/" + versionPath + "/GPL/" + filename; + } + } + + private static byte[] filebytes(final String vendor, final String platform) throws IOException { + final String effectivePlatform = platform.equals("darwin") ? "osx" : platform; + if (vendor.equals(VENDOR_ADOPTOPENJDK)) { + return JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_adoptopenjdk_" + effectivePlatform + "." + extension(platform)).getBytes() + } else if (vendor.equals(VENDOR_OPENJDK)) { + JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_openjdk_" + effectivePlatform + "." + extension(platform)).getBytes() + } + } + + private static String extension(String platform) { + platform.equals("windows") ? 
"zip" : "tar.gz"; + } + + private static String repositoryMockSetup(WireMockServer server, String jdkVendor, String jdkVersion) { + """allprojects{ p -> + // wire the jdk repo to wiremock + p.repositories.all { repo -> + if(repo.name == "jdk_repo_${jdkVendor}_${jdkVersion}") { + repo.setUrl('${server.baseUrl()}') + } + } + }""" + } +} diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy index a256ace4f3a1b..be4988d92912f 100644 --- a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/AbstractGradleFuncTest.groovy @@ -36,7 +36,7 @@ abstract class AbstractGradleFuncTest extends Specification{ def setup() { settingsFile = testProjectDir.newFile('settings.gradle') - settingsFile << "rootProject.name = 'hello-world'" + settingsFile << "rootProject.name = 'hello-world'\n" buildFile = testProjectDir.newFile('build.gradle') } diff --git a/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/WiremockFixture.groovy b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/WiremockFixture.groovy new file mode 100644 index 0000000000000..d1725bb360cff --- /dev/null +++ b/buildSrc/src/integTest/groovy/org/elasticsearch/gradle/fixtures/WiremockFixture.groovy @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.fixtures + +import com.github.tomakehurst.wiremock.WireMockServer +import org.gradle.testkit.runner.BuildResult + +import static com.github.tomakehurst.wiremock.client.WireMock.aResponse +import static com.github.tomakehurst.wiremock.client.WireMock.get +import static com.github.tomakehurst.wiremock.client.WireMock.head +import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo + +/** + * A test fixture that allows running testkit builds with wiremock + * */ +class WiremockFixture { + + /** + * the buildRunClosure has passed an instance of WireMockServer that can be used to access e.g. the baseUrl of + * the configured server: + * + *
+     *  WiremockFixture.withWireMock(mockRepoUrl, mockedContent) { server ->
+     *      buildFile << """
+     *          // wire a Gradle repository to the WireMock server
+     *          repositories {
+     *              maven {
+     *                 url = '${server.baseUrl()}'
+     *              }
+     *          }
+     *      """
+     *      gradleRunner('myTask').build()
+     *  }
+ * */ + static BuildResult withWireMock(String expectedUrl, byte[] expectedContent, Closure buildRunClosure) { + WireMockServer wireMock = new WireMockServer(0); + try { + wireMock.stubFor(head(urlEqualTo(expectedUrl)).willReturn(aResponse().withStatus(200))); + wireMock.stubFor( + get(urlEqualTo(expectedUrl)).willReturn(aResponse().withStatus(200).withBody(expectedContent)) + ) + wireMock.start(); + return buildRunClosure.call(wireMock); + } catch (Exception e) { + // for debugging + System.err.println("missed requests: " + wireMock.findUnmatchedRequests().getRequests()); + throw e; + } finally { + wireMock.stop(); + } + } + +} diff --git a/buildSrc/src/integTest/java/org/elasticsearch/gradle/AdoptOpenJdkDownloadPluginIT.java b/buildSrc/src/integTest/java/org/elasticsearch/gradle/AdoptOpenJdkDownloadPluginIT.java deleted file mode 100644 index 866be326a51cf..0000000000000 --- a/buildSrc/src/integTest/java/org/elasticsearch/gradle/AdoptOpenJdkDownloadPluginIT.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle; - -import java.io.IOException; -import java.io.InputStream; - -public class AdoptOpenJdkDownloadPluginIT extends JdkDownloadPluginIT { - - @Override - public String jdkVersion() { - return "12.0.2+10"; - } - - @Override - public String jdkVendor() { - return "adoptopenjdk"; - } - - @Override - protected String urlPath(final String version, final String platform, final String extension) { - final String module = platform.equals("osx") ? "mac" : platform; - return "/jdk-12.0.2+10/" + module + "/x64/jdk/hotspot/normal/adoptopenjdk"; - } - - @Override - protected byte[] filebytes(final String platform, final String extension) throws IOException { - try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_adoptopenjdk_" + platform + "." + extension)) { - return stream.readAllBytes(); - } - } - -} diff --git a/buildSrc/src/integTest/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java b/buildSrc/src/integTest/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java deleted file mode 100644 index 820c64b51dd55..0000000000000 --- a/buildSrc/src/integTest/java/org/elasticsearch/gradle/JdkDownloadPluginIT.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle; - -import com.github.tomakehurst.wiremock.WireMockServer; -import org.elasticsearch.gradle.test.GradleIntegrationTestCase; -import org.gradle.testkit.runner.BuildResult; -import org.gradle.testkit.runner.GradleRunner; - -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.function.Consumer; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static com.github.tomakehurst.wiremock.client.WireMock.aResponse; -import static com.github.tomakehurst.wiremock.client.WireMock.get; -import static com.github.tomakehurst.wiremock.client.WireMock.head; -import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo; -import static org.hamcrest.CoreMatchers.equalTo; - -public abstract class JdkDownloadPluginIT extends GradleIntegrationTestCase { - - private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)"); - private static final Pattern NUM_CONFIGS_LOGLINE = Pattern.compile("NUM CONFIGS: (.*)"); - - protected abstract String jdkVersion(); - - protected abstract String jdkVendor(); - - public final void testLinuxExtraction() throws IOException { - assertExtraction("getLinuxJdk", "linux", "bin/java", jdkVendor(), jdkVersion()); - } - - public final void testDarwinExtraction() throws IOException { - assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", jdkVendor(), jdkVersion()); - } - - public final void testWindowsExtraction() throws IOException { - assertExtraction("getWindowsJdk", "windows", "bin/java", jdkVendor(), jdkVersion()); - } - - public final void testCrossProjectReuse() throws IOException { - runBuild("numConfigurations", "linux", result -> { - Matcher matcher = NUM_CONFIGS_LOGLINE.matcher(result.getOutput()); - assertTrue("could not find num configs in output: " + result.getOutput(), matcher.find()); - assertThat(Integer.parseInt(matcher.group(1)), equalTo(6)); // 3 import configs, 3 export configs - }, jdkVendor(), jdkVersion()); - } - - protected void assertExtraction(String taskname, String platform, String javaBin, String vendor, String version) throws IOException { - runBuild(taskname, platform, result -> { - Matcher matcher = JDK_HOME_LOGLINE.matcher(result.getOutput()); - assertTrue("could not find jdk home in output: " + result.getOutput(), matcher.find()); - String jdkHome = matcher.group(1); - Path javaPath = Paths.get(jdkHome, javaBin); - assertTrue(javaPath.toString(), Files.exists(javaPath)); - }, vendor, version); - } - - protected abstract String urlPath(String version, String platform, String extension); - - protected abstract byte[] filebytes(String platform, String extension) throws IOException; - - private void runBuild(String taskname, String platform, Consumer assertions, String vendor, String version) - throws IOException { - WireMockServer wireMock = new WireMockServer(0); - try { - String extension = platform.equals("windows") ? 
"zip" : "tar.gz"; - - wireMock.stubFor(head(urlEqualTo(urlPath(version, platform, extension))).willReturn(aResponse().withStatus(200))); - wireMock.stubFor( - get(urlEqualTo(urlPath(version, platform, extension))).willReturn( - aResponse().withStatus(200).withBody(filebytes(platform, extension)) - ) - ); - wireMock.start(); - - GradleRunner runner = GradleRunner.create() - .withProjectDir(getProjectDir("jdk-download")) - .withArguments( - taskname, - "-Dtests.jdk_vendor=" + vendor, - "-Dtests.jdk_version=" + version, - "-Dtests.jdk_repo=" + wireMock.baseUrl(), - "-i" - ) - .withPluginClasspath(); - - BuildResult result = runner.build(); - assertions.accept(result); - } catch (Exception e) { - // for debugging - System.err.println("missed requests: " + wireMock.findUnmatchedRequests().getRequests()); - throw e; - } finally { - wireMock.stop(); - } - } -} diff --git a/buildSrc/src/integTest/java/org/elasticsearch/gradle/OpenJdkDownloadPluginIT.java b/buildSrc/src/integTest/java/org/elasticsearch/gradle/OpenJdkDownloadPluginIT.java deleted file mode 100644 index e085a4564adef..0000000000000 --- a/buildSrc/src/integTest/java/org/elasticsearch/gradle/OpenJdkDownloadPluginIT.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.gradle; - -import java.io.IOException; -import java.io.InputStream; - -public class OpenJdkDownloadPluginIT extends JdkDownloadPluginIT { - - public String oldJdkVersion() { - return "1+99"; - } - - @Override - public String jdkVersion() { - return "12.0.1+99@123456789123456789123456789abcde"; - } - - @Override - protected String jdkVendor() { - return "openjdk"; - } - - @Override - protected String urlPath(final String version, final String platform, final String extension) { - final boolean isOld = version.equals(oldJdkVersion()); - final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; - final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + platform + "-x64_bin." + extension; - return "/java/GA/" + versionPath + "/GPL/" + filename; - } - - @Override - protected byte[] filebytes(final String platform, final String extension) throws IOException { - try (InputStream stream = JdkDownloadPluginIT.class.getResourceAsStream("fake_openjdk_" + platform + "." 
+ extension)) { - return stream.readAllBytes(); - } - } - - public final void testLinuxExtractionOldVersion() throws IOException { - assertExtraction("getLinuxJdk", "linux", "bin/java", jdkVendor(), oldJdkVersion()); - } - - public final void testDarwinExtractionOldVersion() throws IOException { - assertExtraction("getDarwinJdk", "osx", "Contents/Home/bin/java", jdkVendor(), oldJdkVersion()); - } - - public final void testWindowsExtractionOldVersion() throws IOException { - assertExtraction("getWindowsJdk", "windows", "bin/java", jdkVendor(), oldJdkVersion()); - } - -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java index b52fb363d2002..4a664dac7f84b 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java @@ -19,261 +19,126 @@ package org.elasticsearch.gradle; -import org.elasticsearch.gradle.tar.SymbolicLinkPreservingUntarTask; -import org.gradle.api.Action; +import org.elasticsearch.gradle.transform.SymbolicLinkPreservingUntarTransform; +import org.elasticsearch.gradle.transform.UnzipTransform; import org.gradle.api.GradleException; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; -import org.gradle.api.artifacts.ConfigurationContainer; -import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.dsl.RepositoryHandler; import org.gradle.api.artifacts.repositories.IvyArtifactRepository; -import org.gradle.api.file.CopySpec; -import org.gradle.api.file.Directory; -import org.gradle.api.file.FileTree; -import org.gradle.api.file.RelativePath; -import org.gradle.api.provider.Provider; -import org.gradle.api.tasks.Copy; -import org.gradle.api.tasks.TaskProvider; - -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import java.util.concurrent.Callable; -import java.util.stream.StreamSupport; - -import static org.elasticsearch.gradle.util.GradleUtils.findByName; -import static org.elasticsearch.gradle.util.GradleUtils.maybeCreate; +import org.gradle.api.artifacts.type.ArtifactTypeDefinition; +import org.gradle.api.internal.artifacts.ArtifactAttributes; public class JdkDownloadPlugin implements Plugin { + public static final String VENDOR_ADOPTOPENJDK = "adoptopenjdk"; + public static final String VENDOR_OPENJDK = "openjdk"; + private static final String REPO_NAME_PREFIX = "jdk_repo_"; private static final String EXTENSION_NAME = "jdks"; @Override public void apply(Project project) { - NamedDomainObjectContainer jdksContainer = project.container( - Jdk.class, - name -> new Jdk(name, project.getConfigurations().create("jdk_" + name), project.getObjects()) - ); - project.getExtensions().add(EXTENSION_NAME, jdksContainer); - - project.afterEvaluate(p -> { - for (Jdk jdk : jdksContainer) { - jdk.finalizeValues(); - - // depend on the jdk directory "artifact" from the root project - DependencyHandler dependencies = project.getDependencies(); - Map depConfig = new HashMap<>(); - depConfig.put("path", ":"); // root project - depConfig.put( - "configuration", - configName("extracted_jdk", jdk.getVendor(), jdk.getVersion(), jdk.getPlatform(), jdk.getArchitecture()) - ); - project.getDependencies().add(jdk.getConfigurationName(), 
dependencies.project(depConfig)); + project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); + project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { + transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + }); - // ensure a root level jdk download task exists - setupRootJdkDownload(project.getRootProject(), jdk); - } + ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); + project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { + transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()); + transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); }); - } - @SuppressWarnings("unchecked") - public static NamedDomainObjectContainer getContainer(Project project) { - return (NamedDomainObjectContainer) project.getExtensions().getByName(EXTENSION_NAME); + NamedDomainObjectContainer jdksContainer = project.container(Jdk.class, name -> { + Configuration configuration = project.getConfigurations().create("jdk_" + name); + configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + Jdk jdk = new Jdk(name, configuration, project.getObjects()); + configuration.defaultDependencies(dependencies -> { + jdk.finalizeValues(); + setupRepository(project, jdk); + dependencies.add(project.getDependencies().create(dependencyNotation(jdk))); + }); + return jdk; + }); + project.getExtensions().add(EXTENSION_NAME, jdksContainer); } - private static void setupRootJdkDownload(Project rootProject, Jdk jdk) { - String extractTaskName = String.format( - Locale.ROOT, - "extract-%s-%s-jdk-%s-%s", - jdk.getPlatform(), - jdk.getArchitecture(), - jdk.getVendor(), - jdk.getVersion() - ); - - // Skip setup if we've already configured a JDK for this platform, vendor and version - if (findByName(rootProject.getTasks(), extractTaskName) == null) { - RepositoryHandler repositories = rootProject.getRepositories(); - - /* - * Define the appropriate repository for the given JDK vendor and version - * - * For Oracle/OpenJDK/AdoptOpenJDK we define a repository per-version. 
- */ - String repoName = REPO_NAME_PREFIX + jdk.getVendor() + "_" + jdk.getVersion(); - String repoUrl; - String artifactPattern; - - if (jdk.getVendor().equals("adoptopenjdk")) { - repoUrl = "https://api.adoptopenjdk.net/v3/binary/version/"; - if (jdk.getMajor().equals("8")) { - // legacy pattern for JDK 8 - artifactPattern = "jdk" - + jdk.getBaseVersion() - + "-" - + jdk.getBuild() - + "/[module]/[classifier]/jdk/hotspot/normal/adoptopenjdk"; - } else { - // current pattern since JDK 9 - artifactPattern = "jdk-" - + jdk.getBaseVersion() - + "+" - + jdk.getBuild() - + "/[module]/[classifier]/jdk/hotspot/normal/adoptopenjdk"; - } - } else if (jdk.getVendor().equals("openjdk")) { - repoUrl = "https://download.oracle.com"; - if (jdk.getHash() != null) { - // current pattern since 12.0.1 - artifactPattern = "java/GA/jdk" - + jdk.getBaseVersion() - + "/" - + jdk.getHash() - + "/" - + jdk.getBuild() - + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; - } else { - // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back - artifactPattern = "java/GA/jdk" - + jdk.getMajor() - + "/" - + jdk.getBuild() - + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; - } + private void setupRepository(Project project, Jdk jdk) { + RepositoryHandler repositories = project.getRepositories(); + + /* + * Define the appropriate repository for the given JDK vendor and version + * + * For Oracle/OpenJDK/AdoptOpenJDK we define a repository per-version. + */ + String repoName = REPO_NAME_PREFIX + jdk.getVendor() + "_" + jdk.getVersion(); + String repoUrl; + String artifactPattern; + + if (jdk.getVendor().equals(VENDOR_ADOPTOPENJDK)) { + repoUrl = "https://api.adoptopenjdk.net/v3/binary/version/"; + if (jdk.getMajor().equals("8")) { + // legacy pattern for JDK 8 + artifactPattern = "jdk" + + jdk.getBaseVersion() + + "-" + + jdk.getBuild() + + "/[module]/[classifier]/jdk/hotspot/normal/adoptopenjdk"; } else { - throw new GradleException("Unknown JDK vendor [" + jdk.getVendor() + "]"); + // current pattern since JDK 9 + artifactPattern = "jdk-" + + jdk.getBaseVersion() + + "+" + + jdk.getBuild() + + "/[module]/[classifier]/jdk/hotspot/normal/adoptopenjdk"; } - - // Define the repository if we haven't already - if (repositories.findByName(repoName) == null) { - IvyArtifactRepository ivyRepo = repositories.ivy(repo -> { - repo.setName(repoName); - repo.setUrl(repoUrl); - repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); - repo.patternLayout(layout -> layout.artifact(artifactPattern)); - }); - repositories.exclusiveContent(exclusiveContentRepository -> { - exclusiveContentRepository.filter(config -> config.includeGroup(groupName(jdk))); - exclusiveContentRepository.forRepositories(ivyRepo); - }); + } else if (jdk.getVendor().equals(VENDOR_OPENJDK)) { + repoUrl = "https://download.oracle.com"; + if (jdk.getHash() != null) { + // current pattern since 12.0.1 + artifactPattern = "java/GA/jdk" + + jdk.getBaseVersion() + + "/" + + jdk.getHash() + + "/" + + jdk.getBuild() + + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; + } else { + // simpler legacy pattern from JDK 9 to JDK 12 that we are advocating to Oracle to bring back + artifactPattern = "java/GA/jdk" + + jdk.getMajor() + + "/" + + jdk.getBuild() + + "/GPL/openjdk-[revision]_[module]-[classifier]_bin.[ext]"; } - - // Declare a configuration and dependency from which to download the remote JDK - final ConfigurationContainer configurations = rootProject.getConfigurations(); - String 
downloadConfigName = configName(jdk.getVendor(), jdk.getVersion(), jdk.getPlatform(), jdk.getArchitecture()); - Configuration downloadConfiguration = maybeCreate(configurations, downloadConfigName); - rootProject.getDependencies().add(downloadConfigName, dependencyNotation(jdk)); - - // Create JDK extract task - final Provider extractPath = rootProject.getLayout() - .getBuildDirectory() - .dir("jdks/" + jdk.getVendor() + "-" + jdk.getBaseVersion() + "_" + jdk.getPlatform() + "_" + jdk.getArchitecture()); - - TaskProvider extractTask = createExtractTask( - extractTaskName, - rootProject, - jdk.getPlatform(), - downloadConfiguration, - extractPath - ); - - // Declare a configuration for the extracted JDK archive - String artifactConfigName = configName( - "extracted_jdk", - jdk.getVendor(), - jdk.getVersion(), - jdk.getPlatform(), - jdk.getArchitecture() - ); - maybeCreate(configurations, artifactConfigName); - rootProject.getArtifacts().add(artifactConfigName, extractPath, artifact -> artifact.builtBy(extractTask)); + } else { + throw new GradleException("Unknown JDK vendor [" + jdk.getVendor() + "]"); } - } - - private static TaskProvider createExtractTask( - String taskName, - Project rootProject, - String platform, - Configuration downloadConfiguration, - Provider extractPath - ) { - if (platform.equals("windows")) { - final Callable fileGetter = () -> rootProject.zipTree(downloadConfiguration.getSingleFile()); - // TODO: look into doing this as an artifact transform, which are cacheable starting in gradle 5.3 - Action removeRootDir = copy -> { - // remove extra unnecessary directory levels - copy.eachFile(details -> { - Path newPathSegments = trimArchiveExtractPath(details.getRelativePath().getPathString()); - String[] segments = StreamSupport.stream(newPathSegments.spliterator(), false) - .map(Path::toString) - .toArray(String[]::new); - details.setRelativePath(new RelativePath(true, segments)); - }); - copy.setIncludeEmptyDirs(false); - }; - return rootProject.getTasks().register(taskName, Copy.class, copyTask -> { - copyTask.doFirst(new Action() { - @Override - public void execute(Task t) { - rootProject.delete(extractPath); - } - }); - copyTask.into(extractPath); - copyTask.from(fileGetter, removeRootDir); - }); - } else { - /* - * Gradle TarFileTree does not resolve symlinks, so we have to manually extract and preserve the symlinks. - * cf. https://github.com/gradle/gradle/issues/3982 and https://discuss.gradle.org/t/tar-and-untar-losing-symbolic-links/2039 - */ - return rootProject.getTasks().register(taskName, SymbolicLinkPreservingUntarTask.class, task -> { - task.getTarFile().fileProvider(rootProject.provider(downloadConfiguration::getSingleFile)); - task.getExtractPath().set(extractPath); - task.setTransform(JdkDownloadPlugin::trimArchiveExtractPath); + // Define the repository if we haven't already + if (repositories.findByName(repoName) == null) { + repositories.ivy(repo -> { + repo.setName(repoName); + repo.setUrl(repoUrl); + repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); + repo.patternLayout(layout -> layout.artifact(artifactPattern)); + repo.content(repositoryContentDescriptor -> repositoryContentDescriptor.includeGroup(groupName(jdk))); }); } } - /* - * We want to remove up to the and including the jdk-.* relative paths. That is a JDK archive is structured as: - * jdk-12.0.1/ - * jdk-12.0.1/Contents - * ... - * - * and we want to remove the leading jdk-12.0.1. 
Note however that there could also be a leading ./ as in - * ./ - * ./jdk-12.0.1/ - * ./jdk-12.0.1/Contents - * - * so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading components. - */ - private static Path trimArchiveExtractPath(String relativePath) { - final Path entryName = Paths.get(relativePath); - int index = 0; - for (; index < entryName.getNameCount(); index++) { - if (entryName.getName(index).toString().matches("jdk-?\\d.*")) { - break; - } - } - if (index + 1 >= entryName.getNameCount()) { - // this happens on the top-level directories in the archive, which we are removing - return null; - } - // finally remove the top-level directories from the output path - return entryName.subpath(index + 1, entryName.getNameCount()); + @SuppressWarnings("unchecked") + public static NamedDomainObjectContainer getContainer(Project project) { + return (NamedDomainObjectContainer) project.getExtensions().getByName(EXTENSION_NAME); } private static String dependencyNotation(Jdk jdk) { String platformDep = jdk.getPlatform().equals("darwin") || jdk.getPlatform().equals("osx") - ? (jdk.getVendor().equals("adoptopenjdk") ? "mac" : "osx") + ? (jdk.getVendor().equals(VENDOR_ADOPTOPENJDK) ? "mac" : "osx") : jdk.getPlatform(); String extension = jdk.getPlatform().equals("windows") ? "zip" : "tar.gz"; @@ -284,7 +149,4 @@ private static String groupName(Jdk jdk) { return jdk.getVendor() + "_" + jdk.getMajor(); } - private static String configName(String... parts) { - return String.join("_", parts); - } } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/tar/SymbolicLinkPreservingUntarTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/tar/SymbolicLinkPreservingUntarTask.java deleted file mode 100644 index 7b35531cb1efd..0000000000000 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/tar/SymbolicLinkPreservingUntarTask.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.gradle.tar; - -import org.apache.commons.compress.archivers.tar.TarArchiveEntry; -import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; -import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; -import org.gradle.api.DefaultTask; -import org.gradle.api.GradleException; -import org.gradle.api.file.DirectoryProperty; -import org.gradle.api.file.RegularFileProperty; -import org.gradle.api.model.ObjectFactory; -import org.gradle.api.tasks.InputFile; -import org.gradle.api.tasks.Internal; -import org.gradle.api.tasks.OutputDirectory; -import org.gradle.api.tasks.TaskAction; - -import javax.inject.Inject; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.attribute.PosixFileAttributeView; -import java.nio.file.attribute.PosixFilePermission; -import java.nio.file.attribute.PosixFilePermissions; -import java.util.Set; -import java.util.function.Function; - -/** - * A custom task that explodes a tar archive that preserves symbolic links. - * - * This task is necessary because the built-in task {@link org.gradle.api.internal.file.archive.TarFileTree} does not preserve symbolic - * links. - */ -public class SymbolicLinkPreservingUntarTask extends DefaultTask { - - private final RegularFileProperty tarFile; - - @InputFile - public RegularFileProperty getTarFile() { - return tarFile; - } - - private final DirectoryProperty extractPath; - - @OutputDirectory - public DirectoryProperty getExtractPath() { - return extractPath; - } - - private Function transform; - - @Internal - public Function getTransform() { - return transform; - } - - /** - * A transform to apply to the tar entry, to derive the relative path from the entry name. If the return value is null, the entry is - * dropped from the exploded tar archive. 
- * - * @param transform the transform - */ - public void setTransform(Function transform) { - this.transform = transform; - } - - @Inject - public SymbolicLinkPreservingUntarTask(final ObjectFactory objectFactory) { - this.tarFile = objectFactory.fileProperty(); - this.extractPath = objectFactory.directoryProperty(); - this.transform = name -> Paths.get(name); - } - - @TaskAction - final void execute() { - // ensure the target extraction path is empty - getProject().delete(extractPath); - try ( - TarArchiveInputStream tar = new TarArchiveInputStream( - new GzipCompressorInputStream(new FileInputStream(tarFile.getAsFile().get())) - ) - ) { - final Path destinationPath = extractPath.get().getAsFile().toPath(); - TarArchiveEntry entry = tar.getNextTarEntry(); - while (entry != null) { - final Path relativePath = transform.apply(entry.getName()); - if (relativePath == null) { - entry = tar.getNextTarEntry(); - continue; - } - - final Path destination = destinationPath.resolve(relativePath); - final Path parent = destination.getParent(); - if (Files.exists(parent) == false) { - Files.createDirectories(parent); - } - if (entry.isDirectory()) { - Files.createDirectory(destination); - } else if (entry.isSymbolicLink()) { - Files.createSymbolicLink(destination, Paths.get(entry.getLinkName())); - } else { - // copy the file from the archive using a small buffer to avoid heaping - Files.createFile(destination); - try (FileOutputStream fos = new FileOutputStream(destination.toFile())) { - tar.transferTo(fos); - } - } - if (entry.isSymbolicLink() == false) { - // check if the underlying file system supports POSIX permissions - final PosixFileAttributeView view = Files.getFileAttributeView(destination, PosixFileAttributeView.class); - if (view != null) { - final Set permissions = PosixFilePermissions.fromString( - permissions((entry.getMode() >> 6) & 07) + permissions((entry.getMode() >> 3) & 07) + permissions( - (entry.getMode() >> 0) & 07 - ) - ); - Files.setPosixFilePermissions(destination, permissions); - } - } - entry = tar.getNextTarEntry(); - } - } catch (final IOException e) { - throw new GradleException("unable to extract tar [" + tarFile.getAsFile().get().toPath() + "]", e); - } - } - - private String permissions(final int permissions) { - if (permissions < 0 || permissions > 7) { - throw new IllegalArgumentException("permissions [" + permissions + "] out of range"); - } - final StringBuilder sb = new StringBuilder(3); - if ((permissions & 4) == 4) { - sb.append('r'); - } else { - sb.append('-'); - } - if ((permissions & 2) == 2) { - sb.append('w'); - } else { - sb.append('-'); - } - if ((permissions & 1) == 1) { - sb.append('x'); - } else { - sb.append('-'); - } - return sb.toString(); - } - -} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java new file mode 100644 index 0000000000000..20754d077c731 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/SymbolicLinkPreservingUntarTransform.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.transform; + +import org.apache.commons.compress.archivers.tar.TarArchiveEntry; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.gradle.api.logging.Logging; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.PosixFileAttributeView; +import java.nio.file.attribute.PosixFilePermission; +import java.nio.file.attribute.PosixFilePermissions; +import java.util.Set; + +public abstract class SymbolicLinkPreservingUntarTransform implements UnpackTransform { + + public void unpack(File tarFile, File targetDir) throws IOException { + Logging.getLogger(SymbolicLinkPreservingUntarTransform.class) + .info("Unpacking " + tarFile.getName() + " using " + SymbolicLinkPreservingUntarTransform.class.getSimpleName() + "."); + + TarArchiveInputStream tar = new TarArchiveInputStream(new GzipCompressorInputStream(new FileInputStream(tarFile))); + final Path destinationPath = targetDir.toPath(); + TarArchiveEntry entry = tar.getNextTarEntry(); + while (entry != null) { + final Path relativePath = UnpackTransform.trimArchiveExtractPath(entry.getName()); + if (relativePath == null) { + entry = tar.getNextTarEntry(); + continue; + } + + final Path destination = destinationPath.resolve(relativePath); + final Path parent = destination.getParent(); + if (Files.exists(parent) == false) { + Files.createDirectories(parent); + } + if (entry.isDirectory()) { + Files.createDirectory(destination); + } else if (entry.isSymbolicLink()) { + Files.createSymbolicLink(destination, Paths.get(entry.getLinkName())); + } else { + // copy the file from the archive using a small buffer to avoid heaping + Files.createFile(destination); + try (FileOutputStream fos = new FileOutputStream(destination.toFile())) { + tar.transferTo(fos); + } + } + if (entry.isSymbolicLink() == false) { + // check if the underlying file system supports POSIX permissions + final PosixFileAttributeView view = Files.getFileAttributeView(destination, PosixFileAttributeView.class); + if (view != null) { + final Set permissions = PosixFilePermissions.fromString( + permissions((entry.getMode() >> 6) & 07) + permissions((entry.getMode() >> 3) & 07) + permissions( + (entry.getMode() >> 0) & 07 + ) + ); + Files.setPosixFilePermissions(destination, permissions); + } + } + entry = tar.getNextTarEntry(); + } + + } + + private static String permissions(final int permissions) { + if (permissions < 0 || permissions > 7) { + throw new IllegalArgumentException("permissions [" + permissions + "] out of range"); + } + final StringBuilder sb = new StringBuilder(3); + if ((permissions & 4) == 4) { + sb.append('r'); + } else { + sb.append('-'); + } + if ((permissions & 2) == 2) { + sb.append('w'); + } else { + sb.append('-'); + } + if ((permissions & 1) == 1) { + sb.append('x'); + } else { + sb.append('-'); + } + return 
sb.toString(); + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnpackTransform.java b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnpackTransform.java new file mode 100644 index 0000000000000..1b7b6ce4d4205 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnpackTransform.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.transform; + +import org.gradle.api.artifacts.transform.InputArtifact; +import org.gradle.api.artifacts.transform.TransformAction; +import org.gradle.api.artifacts.transform.TransformOutputs; +import org.gradle.api.artifacts.transform.TransformParameters; +import org.gradle.api.file.FileSystemLocation; +import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.PathSensitive; +import org.gradle.api.tasks.PathSensitivity; +import org.gradle.internal.UncheckedException; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; + +public interface UnpackTransform extends TransformAction { + + @PathSensitive(PathSensitivity.NAME_ONLY) + @InputArtifact + Provider getArchiveFile(); + + @Override + default void transform(TransformOutputs outputs) { + File archiveFile = getArchiveFile().get().getAsFile(); + File unzipDir = outputs.dir(archiveFile.getName()); + try { + unpack(archiveFile, unzipDir); + } catch (IOException e) { + throw UncheckedException.throwAsUncheckedException(e); + } + } + + void unpack(File archiveFile, File targetDir) throws IOException; + + /* + * We want to remove up to the and including the jdk-.* relative paths. That is a JDK archive is structured as: + * jdk-12.0.1/ + * jdk-12.0.1/Contents + * ... + * + * and we want to remove the leading jdk-12.0.1. Note however that there could also be a leading ./ as in + * ./ + * ./jdk-12.0.1/ + * ./jdk-12.0.1/Contents + * + * so we account for this and search the path components until we find the jdk-12.0.1, and strip the leading components. 
+ */ + static Path trimArchiveExtractPath(String relativePath) { + final Path entryName = Paths.get(relativePath); + int index = 0; + for (; index < entryName.getNameCount(); index++) { + if (entryName.getName(index).toString().matches("jdk-?\\d.*")) { + break; + } + } + if (index + 1 >= entryName.getNameCount()) { + // this happens on the top-level directories in the archive, which we are removing + return null; + } + // finally remove the top-level directories from the output path + return entryName.subpath(index + 1, entryName.getNameCount()); + } + +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnzipTransform.java b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnzipTransform.java new file mode 100644 index 0000000000000..82fe2c95f2141 --- /dev/null +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/transform/UnzipTransform.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.gradle.transform; + +import org.apache.commons.io.IOUtils; +import org.gradle.api.logging.Logging; + +import java.io.BufferedInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.util.zip.ZipEntry; +import java.util.zip.ZipInputStream; + +public abstract class UnzipTransform implements UnpackTransform { + + public void unpack(File zipFile, File targetDir) throws IOException { + Logging.getLogger(UnzipTransform.class) + .info("Unpacking " + zipFile.getName() + " using " + UnzipTransform.class.getSimpleName() + "."); + + try (ZipInputStream inputStream = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipFile)))) { + ZipEntry entry; + while ((entry = inputStream.getNextEntry()) != null) { + if (entry.isDirectory()) { + continue; + } + String child = UnpackTransform.trimArchiveExtractPath(entry.getName()).toString(); + File outFile = new File(targetDir, child); + outFile.getParentFile().mkdirs(); + try (FileOutputStream outputStream = new FileOutputStream(outFile)) { + IOUtils.copyLarge(inputStream, outputStream); + } + } + } + } +} diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java b/buildSrc/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java index f05af0740d361..72b062b727b8a 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/util/GradleUtils.java @@ -21,8 +21,6 @@ import org.elasticsearch.gradle.ElasticsearchJavaPlugin; import org.gradle.api.Action; import org.gradle.api.GradleException; -import org.gradle.api.NamedDomainObjectContainer; -import org.gradle.api.PolymorphicDomainObjectContainer; import org.gradle.api.Project; import org.gradle.api.Task; import 
org.gradle.api.UnknownTaskException; @@ -47,7 +45,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.function.Function; public abstract class GradleUtils { @@ -60,28 +57,6 @@ public static SourceSetContainer getJavaSourceSets(Project project) { return project.getConvention().getPlugin(JavaPluginConvention.class).getSourceSets(); } - public static T maybeCreate(NamedDomainObjectContainer collection, String name) { - return Optional.ofNullable(collection.findByName(name)).orElse(collection.create(name)); - } - - public static T maybeCreate(NamedDomainObjectContainer collection, String name, Action action) { - return Optional.ofNullable(collection.findByName(name)).orElseGet(() -> { - T result = collection.create(name); - action.execute(result); - return result; - }); - - } - - public static T maybeCreate(PolymorphicDomainObjectContainer collection, String name, Class type, Action action) { - return Optional.ofNullable(collection.findByName(name)).orElseGet(() -> { - T result = collection.create(name, type); - action.execute(result); - return result; - }); - - } - public static TaskProvider maybeRegister(TaskContainer tasks, String name, Class clazz, Action action) { try { return tasks.named(name, clazz); diff --git a/buildSrc/src/testKit/jdk-download/build.gradle b/buildSrc/src/testKit/jdk-download/build.gradle deleted file mode 100644 index aa0e7958d6db7..0000000000000 --- a/buildSrc/src/testKit/jdk-download/build.gradle +++ /dev/null @@ -1,16 +0,0 @@ -project.gradle.projectsEvaluated { - // wire the jdk repo to wiremock - String fakeJdkRepo = Objects.requireNonNull(System.getProperty('tests.jdk_repo')) - String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor')) - String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version')) - println rootProject.repositories.asMap.keySet() - IvyArtifactRepository repository = - (IvyArtifactRepository) rootProject.repositories.getByName("jdk_repo_${fakeJdkVendor}_${fakeJdkVersion}") - repository.setUrl(fakeJdkRepo) -} - -tasks.register("numConfigurations") { - doLast { - println "NUM CONFIGS: ${project.configurations.size()}" - } -} diff --git a/buildSrc/src/testKit/jdk-download/reuse/build.gradle b/buildSrc/src/testKit/jdk-download/reuse/build.gradle deleted file mode 100644 index 39a5c3372d5f5..0000000000000 --- a/buildSrc/src/testKit/jdk-download/reuse/build.gradle +++ /dev/null @@ -1,12 +0,0 @@ -evaluationDependsOn ':subproj' - -String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor')) -String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version')) -jdks { - linux_jdk { - vendor = fakeJdkVendor - version = fakeJdkVersion - platform = "linux" - architecture = "x64" - } -} diff --git a/buildSrc/src/testKit/jdk-download/settings.gradle b/buildSrc/src/testKit/jdk-download/settings.gradle deleted file mode 100644 index 028de479afe30..0000000000000 --- a/buildSrc/src/testKit/jdk-download/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -include 'subproj' \ No newline at end of file diff --git a/buildSrc/src/testKit/jdk-download/subproj/build.gradle b/buildSrc/src/testKit/jdk-download/subproj/build.gradle deleted file mode 100644 index ac94a2963a2b5..0000000000000 --- a/buildSrc/src/testKit/jdk-download/subproj/build.gradle +++ /dev/null @@ -1,48 +0,0 @@ -plugins { - id 'elasticsearch.jdk-download' -} - - -String fakeJdkVendor = Objects.requireNonNull(System.getProperty('tests.jdk_vendor')) 
-String fakeJdkVersion = Objects.requireNonNull(System.getProperty('tests.jdk_version')) -jdks { - linux { - vendor = fakeJdkVendor - version = fakeJdkVersion - platform = "linux" - architecture = "x64" - } - darwin { - vendor = fakeJdkVendor - version = fakeJdkVersion - platform = "darwin" - architecture = "x64" - } - windows { - vendor = fakeJdkVendor - version = fakeJdkVersion - platform = "windows" - architecture = "x64" - } -} - -tasks.register("getLinuxJdk") { - dependsOn jdks.linux - doLast { - println "JDK HOME: " + jdks.linux - } -} - -tasks.register("getDarwinJdk") { - dependsOn jdks.darwin - doLast { - println "JDK HOME: " + jdks.darwin - } -} - -tasks.register("getWindowsJdk") { - dependsOn jdks.windows - doLast { - println "JDK HOME: " + jdks.windows - } -} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index f13d16c9c6893..221890e4738d6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -1590,6 +1590,9 @@ public void testDataStreams() throws Exception { IndicesClient indices = highLevelClient().indices(); response = execute(createDataStreamRequest, indices::createDataStream, indices::createDataStreamAsync); assertThat(response.isAcknowledged(), equalTo(true)); + ensureHealth(dataStreamName, (request -> { + request.addParameter("wait_for_status", "yellow"); + })); GetDataStreamRequest getDataStreamRequest = new GetDataStreamRequest(dataStreamName); GetDataStreamResponse getDataStreamResponse = execute(getDataStreamRequest, indices::getDataStream, indices::getDataStreamAsync); diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index 28b611a446acb..961eff82ed761 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -16,7 +16,7 @@ # Set gid=0 and make group perms==owner perms ################################################################################ -FROM centos:7 AS builder +FROM centos:8 AS builder # `tini` is a tiny but valid init for containers. This is used to cleanly # control how ES and any child processes are shut down. @@ -61,7 +61,7 @@ RUN chmod 0660 config/elasticsearch.yml config/log4j2.properties # Add entrypoint ################################################################################ -FROM centos:7 +FROM centos:8 ENV ELASTIC_CONTAINER true diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index a2c50f5435a91..f7aacc969f6c9 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -46,8 +46,27 @@ Central]. The minimum Java version required is `1.8`. The High Level REST Client is subject to the same release cycle as Elasticsearch. Replace the version with the desired client version. -If you are looking for a SNAPSHOT version, the Elastic Maven Snapshot repository is available -at https://snapshots.elastic.co/maven/. 
+If you are looking for a SNAPSHOT version, you should add our snapshot repository to your Maven config:
+
+["source","xml",subs="attributes"]
+--------------------------------------------------
+<repositories>
+    <repository>
+        <id>es-snapshots</id>
+        <name>elasticsearch snapshot repo</name>
+        <url>https://snapshots.elastic.co/maven/</url>
+    </repository>
+</repositories>
+--------------------------------------------------
+
+or in Gradle:
+
+["source","groovy",subs="attributes"]
+--------------------------------------------------
+maven {
+    url "https://snapshots.elastic.co/maven/"
+}
+--------------------------------------------------

[[java-rest-high-getting-started-maven-maven]]
==== Maven configuration
diff --git a/docs/painless/painless-contexts.asciidoc b/docs/painless/painless-contexts.asciidoc
index 784d5a2a5eaba..de3d96a48d193 100644
--- a/docs/painless/painless-contexts.asciidoc
+++ b/docs/painless/painless-contexts.asciidoc
@@ -31,7 +31,7 @@ specialized code may define new ways to use a Painless script.
 | Score | <>
   | {ref}/query-dsl-function-score-query.html[Elasticsearch Documentation]
 | Field | <>
-  | {ref}/search-request-body.html#request-body-search-script-fields[Elasticsearch Documentation]
+  | {ref}/search-fields.html#script-fields[Elasticsearch Documentation]
 | Filter | <>
   | {ref}/query-dsl-script-query.html[Elasticsearch Documentation]
 | Minimum should match | <>
diff --git a/docs/painless/painless-contexts/painless-field-context.asciidoc b/docs/painless/painless-contexts/painless-field-context.asciidoc
index b6514430e2c5c..b2dc804721cac 100644
--- a/docs/painless/painless-contexts/painless-field-context.asciidoc
+++ b/docs/painless/painless-contexts/painless-field-context.asciidoc
@@ -2,7 +2,7 @@
 === Field context

 Use a Painless script to create a
-{ref}/search-request-body.html#request-body-search-script-fields[script field] to return
+{ref}/search-fields.html#script-fields[script field] to return
 a customized value for each document in the results of a query.

 *Variables*
diff --git a/docs/painless/painless-guide/painless-datetime.asciidoc b/docs/painless/painless-guide/painless-datetime.asciidoc
index 1dd4294b63f75..edde26fe0adc5 100644
--- a/docs/painless/painless-guide/painless-datetime.asciidoc
+++ b/docs/painless/painless-guide/painless-datetime.asciidoc
@@ -825,7 +825,7 @@ GET /messages/_search?pretty=true
 ===== Age of a Message Script Field Example

 The following example uses a
-{ref}/search-request-body.html#request-body-search-script-fields[script field] as part of the
+{ref}/search-fields.html#script-fields[script field] as part of the
 <> to display the elapsed
 time between "now" and when a message was received.
diff --git a/docs/plugins/mapper-size.asciidoc b/docs/plugins/mapper-size.asciidoc
index fbfa2d062f930..c7140d865b832 100644
--- a/docs/plugins/mapper-size.asciidoc
+++ b/docs/plugins/mapper-size.asciidoc
@@ -83,10 +83,10 @@ GET my-index-000001/_search
 <2> Aggregating on the `_size` field
 <3> Sorting on the `_size` field
 <4> Uses a
-{ref}/search-request-body.html#request-body-search-script-fields[script field]
+{ref}/search-fields.html#script-fields[script field]
 to return the `_size` field in the search response.
 <5> Uses a
-{ref}/search-your-data.html#docvalue-fields[doc value
+{ref}/search-fields.html#docvalue-fields[doc value
 field] to return the `_size` field in the search response.
 Doc value fields are useful if
 {ref}/modules-scripting-security.html#allowed-script-types-setting[inline
diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
index 8b6a89cb91601..110cb6f8cc56b 100644
--- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc
@@ -32,6 +32,9 @@ PUT /sales
         "shop": { "type": "keyword" },
+        "location": {
+          "type": "geo_point"
+        },
         "nested": {
           "type": "nested",
           "properties": {
diff --git a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
index db3e4b1032dd5..74023f3ca5dce 100644
--- a/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/tophits-aggregation.asciidoc
@@ -21,8 +21,8 @@ The top_hits aggregation returns regular search hits, because of this many per h
 * <>
 * <>
 * <>
-* <>
-* <>
+* <>
+* <>
 * <>
 * <>
 * <>
diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc
index 83d41a3779d3d..ddc2f85a2e779 100644
--- a/docs/reference/cat.asciidoc
+++ b/docs/reference/cat.asciidoc
@@ -134,6 +134,7 @@ green open my-index-000002 nYFWZEO7TUiOjLQXBaYJpA 1 0 0
 --------------------------------------------------
 // TESTRESPONSE[s/72171|230/\\d+/]
 // TESTRESPONSE[s/u8FNjxh8Rfy_awN11oDKYQ|nYFWZEO7TUiOjLQXBaYJpA/.+/ non_json]
+// TESTRESPONSE[skip:"AwaitsFix https://github.com/elastic/elasticsearch/issues/51619"]

 If you want to change the <>, use the `time` parameter.
diff --git a/docs/reference/data-streams/change-mappings-and-settings.asciidoc b/docs/reference/data-streams/change-mappings-and-settings.asciidoc
index 4e7fcd0c7711c..d55319ce72583 100644
--- a/docs/reference/data-streams/change-mappings-and-settings.asciidoc
+++ b/docs/reference/data-streams/change-mappings-and-settings.asciidoc
@@ -28,7 +28,7 @@ mappings and change <>. See
 ////
 [source,console]
 ----
-PUT /_ilm/policy/logs_policy
+PUT /_ilm/policy/my-data-stream-policy
 {
   "policy": {
     "phases": {
@@ -49,23 +49,23 @@ PUT /_ilm/policy/logs_policy
   }
 }

-PUT /_index_template/logs_data_stream
+PUT /_index_template/my-data-stream-template
 {
-  "index_patterns": [ "logs*" ],
+  "index_patterns": [ "my-data-stream*" ],
   "data_stream": { }
 }

-PUT /_index_template/new_logs_data_stream
+PUT /_index_template/new-data-stream-template
 {
-  "index_patterns": [ "new_logs*" ],
+  "index_patterns": [ "new-data-stream*" ],
   "data_stream": { }
 }

-PUT /_data_stream/logs
+PUT /_data_stream/my-data-stream

-POST /logs/_rollover/
+POST /my-data-stream/_rollover/

-PUT /_data_stream/new_logs
+PUT /_data_stream/new-data-stream
 ----
 // TESTSETUP

 [source,console]
 ----
 DELETE /_data_stream/*

 DELETE /_index_template/*

-DELETE /_ilm/policy/logs_policy
+DELETE /_ilm/policy/my-data-stream-policy
 ----
 // TEARDOWN
 ////
@@ -90,17 +90,17 @@ To add a mapping for a new field to a data stream, follow these steps:
   field mapping is added to future backing indices created for the stream.
 +
 --
-For example, `logs_data_stream` is an existing index template used by the `logs`
-data stream.
+For example, `my-data-stream-template` is an existing index template used by
+`my-data-stream`.

 The following <> request adds a mapping
 for a new field, `message`, to the template.
[source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { }, "template": { "mappings": { @@ -122,11 +122,11 @@ backing indices, including the write index. + -- The following put mapping API request adds the new `message` field mapping to -the `logs` data stream. +`my-data-stream`. [source,console] ---- -PUT /logs/_mapping +PUT /my-data-stream/_mapping { "properties": { "message": { @@ -142,12 +142,12 @@ To add the mapping only to the stream's write index, set the put mapping API's + -- The following put mapping request adds the new `message` field mapping only to -the `logs` stream's write index. The new field mapping is not added to the -stream's other backing indices. +`my-data-stream`'s write index. The new field mapping is not added to +the stream's other backing indices. [source,console] ---- -PUT /logs/_mapping?write_index_only=true +PUT /my-data-stream/_mapping?write_index_only=true { "properties": { "message": { @@ -171,8 +171,8 @@ existing field, follow these steps: field mapping is added to future backing indices created for the stream. + -- -For example, `logs_data_stream` is an existing index template used by the `logs` -data stream. +For example, `my-data-stream-template` is an existing index template used by +`my-data-stream`. The following <> request changes the argument for the `host.ip` field's <> @@ -180,9 +180,9 @@ mapping parameter to `true`. [source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { }, "template": { "mappings": { @@ -208,13 +208,13 @@ to the data stream. By default, this applies the changes to the stream's existing backing indices, including the write index. + -- -The following <> request targets the `logs` -data stream. The request changes the argument for the `host.ip` field's -`ignore_malformed` mapping parameter to `true`. +The following <> request targets +`my-data-stream`. The request changes the argument for the `host.ip` +field's `ignore_malformed` mapping parameter to `true`. [source,console] ---- -PUT /logs/_mapping +PUT /my-data-stream/_mapping { "properties": { "host": { @@ -230,17 +230,17 @@ PUT /logs/_mapping ---- -- + -To apply the mapping changes only to the stream's write index, set the put mapping API's -`write_index_only` query parameter to `true`. +To apply the mapping changes only to the stream's write index, set the put +mapping API's `write_index_only` query parameter to `true`. + -- The following put mapping request changes the `host.ip` field's mapping only for -the `logs` stream's write index. The change is not applied to the stream's other -backing indices. +`my-data-stream`'s write index. The change is not applied to the +stream's other backing indices. [source,console] ---- -PUT /logs/_mapping?write_index_only=true +PUT /my-data-stream/_mapping?write_index_only=true { "properties": { "host": { @@ -276,17 +276,17 @@ follow these steps: applied to future backing indices created for the stream. + -- -For example, `logs_data_stream` is an existing index template used by the `logs` -data stream. +For example, `my-data-stream-template` is an existing index template used by +`my-data-stream`. The following <> request changes the template's `index.refresh_interval` index setting to `30s` (30 seconds). 
 [source,console]
 ----
-PUT /_index_template/logs_data_stream
+PUT /_index_template/my-data-stream-template
 {
-  "index_patterns": [ "logs*" ],
+  "index_patterns": [ "my-data-stream*" ],
   "data_stream": { },
   "template": {
     "settings": {
@@ -304,11 +304,11 @@ the stream's existing backing indices, including the write index.
 +
 --
 The following update index settings API request updates the
-`index.refresh_interval` setting for the `logs` data stream.
+`index.refresh_interval` setting for `my-data-stream`.

 [source,console]
 ----
-PUT /logs/_settings
+PUT /my-data-stream/_settings
 {
   "index": {
     "refresh_interval": "30s"
@@ -329,17 +329,17 @@ To apply a new static setting to future backing indices, update the index
 template used by the data stream. The setting is automatically applied to any
 backing index created after the update.

-For example, `logs_data_stream` is an existing index template used by the `logs`
-data stream.
+For example, `my-data-stream-template` is an existing index template used by
+`my-data-stream`.

 The following <> request adds new `sort.field` and
 `sort.order` index settings to the template.

 [source,console]
 ----
-PUT /_index_template/logs_data_stream
+PUT /_index_template/my-data-stream-template
 {
-  "index_patterns": [ "logs*" ],
+  "index_patterns": [ "my-data-stream*" ],
   "data_stream": { },
   "template": {
     "settings": {
@@ -386,12 +386,12 @@ existing indices, index aliases, or data streams. If so, you should consider
 using another name or pattern.
 --
 The following resolve index API request checks for any existing indices, index
-aliases, or data streams that start with `new_logs`. If not, the `new_logs*`
-wildcard pattern can be used to create a new data stream.
+aliases, or data streams that start with `new-data-stream`. If not, the
+`new-data-stream*` wildcard pattern can be used to create a new data stream.

 [source,console]
 ----
-GET /_resolve/index/new_logs*
+GET /_resolve/index/new-data-stream*
 ----

 The API returns the following response, indicating no existing targets match
@@ -421,25 +421,26 @@ TIP: If you are only adding or changing a few things, we recommend you create a
 new template by copying an existing one and modifying it as needed.
 +
 --
-For example, `logs_data_stream` is an existing index template used by the
-`logs` data stream.
+For example, `my-data-stream-template` is an existing index template used by
+`my-data-stream`.

-The following <> request creates
-a new index template, `new_logs_data_stream`. `new_logs_data_stream`
-uses the `logs_data_stream` template as its basis, with the following changes:
+The following <> request creates a new
+index template, `new-data-stream-template`. `new-data-stream-template`
+uses `my-data-stream-template` as its basis, with the following
+changes:

 * The `index_patterns` wildcard pattern matches any index or data stream
-  starting with `new_logs`.
+  starting with `new-data-stream`.
 * The `@timestamp` field mapping uses the `date_nanos` field data type rather
   than the `date` data type.
 * The template includes `sort.field` and `sort.order` index settings, which were
-  not in the original `logs_data_stream` template.
+  not in the original `my-data-stream-template` template.

 [source,console]
 ----
-PUT /_index_template/new_logs_data_stream
+PUT /_index_template/new-data-stream-template
 {
-  "index_patterns": [ "new_logs*" ],
+  "index_patterns": [ "new-data-stream*" ],
   "data_stream": { },
   "template": {
     "mappings": {
@@ -481,16 +482,16 @@ to retain such a backing index until you are ready to delete its newest data.
==== + -- -The following create data stream API request targets `new_logs`, which matches -the wildcard pattern for the `new_logs_data_stream` template. Because no -existing index or data stream uses this name, this request creates the -`new_logs` data stream. +The following create data stream API request targets `new-data-stream`, which +matches the wildcard pattern for `new-data-stream-template`. +Because no existing index or data stream uses this name, this request creates +the `new-data-stream` data stream. [source,console] ---- -PUT /_data_stream/new_logs +PUT /_data_stream/new-data-stream ---- -// TEST[s/new_logs/new_logs_two/] +// TEST[s/new-data-stream/new-data-stream-two/] -- . If you do not want to mix new and old data in your new data stream, pause the @@ -527,46 +528,46 @@ individual backing indices as the source. You can use the indices. + -- -You plan to reindex data from the `logs` data stream into the newly created -`new_logs` data stream. However, you want to submit a separate reindex request -for each backing index in the `logs` data stream, starting with the oldest -backing index. This preserves the order in which the data was originally -indexed. +For example, you plan to reindex data from `my-data-stream` into +`new-data-stream`. However, you want to submit a separate reindex request for +each backing index in `my-data-stream`, starting with the oldest backing index. +This preserves the order in which the data was originally indexed. -The following get data stream API request retrieves information about the `logs` -data stream, including a list of its backing indices. +The following get data stream API request retrieves information about +`my-data-stream`, including a list of its backing indices. [source,console] ---- -GET /_data_stream/logs +GET /_data_stream/my-data-stream ---- The API returns the following response. Note the `indices` property contains an array of the stream's current backing indices. The first item in the array -contains information about the stream's oldest backing index, `.ds-logs-000001`. +contains information about the stream's oldest backing index, +`.ds-my-data-stream-000001`. [source,console-result] ---- { "data_streams": [ { - "name": "logs", + "name": "my-data-stream", "timestamp_field": { "name": "@timestamp" }, "indices": [ { - "index_name": ".ds-logs-000001", <1> + "index_name": ".ds-my-data-stream-000001", <1> "index_uuid": "Gpdiyq8sRuK9WuthvAdFbw" }, { - "index_name": ".ds-logs-000002", + "index_name": ".ds-my-data-stream-000002", "index_uuid": "_eEfRrFHS9OyhqWntkgHAQ" } ], "generation": 2, "status": "GREEN", - "template": "logs_data_stream" + "template": "my-data-stream-template" } ] } @@ -575,22 +576,23 @@ contains information about the stream's oldest backing index, `.ds-logs-000001`. // TESTRESPONSE[s/"index_uuid": "_eEfRrFHS9OyhqWntkgHAQ"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] -<1> First item in the `indices` array for the `logs` data stream. This item -contains information about the stream's oldest backing index, `.ds-logs-000001`. +<1> First item in the `indices` array for `my-data-stream`. This +item contains information about the stream's oldest backing index, +`.ds-my-data-stream-000001`. The following <> request copies documents from -`.ds-logs-000001` to the `new_logs` data stream. Note the request's `op_type` is -`create`. +`.ds-my-data-stream-000001` to `new-data-stream`. Note the request's `op_type` +is `create`. 
 [source,console]
 ----
 POST /_reindex
 {
   "source": {
-    "index": ".ds-logs-000001"
+    "index": ".ds-my-data-stream-000001"
   },
   "dest": {
-    "index": "new_logs",
+    "index": "new-data-stream",
     "op_type": "create"
   }
 }
 ----
 --
@@ -601,9 +603,9 @@ You can also use a query to reindex only a subset of documents with each
 request.
 +
 --
-The following <> request copies documents from the
-`logs` data stream to the `new_logs` data stream. The request uses a
-<> to only reindex documents with a
+The following <> request copies documents from
+`my-data-stream` to `new-data-stream`. The request
+uses a <> to only reindex documents with a
 timestamp within the last week. Note the request's `op_type` is `create`.

 [source,console]
 ----
 POST /_reindex
 {
   "source": {
-    "index": "logs",
+    "index": "my-data-stream",
     "query": {
       "range": {
         "@timestamp": {
@@ -622,7 +624,7 @@ POST /_reindex
     }
   },
   "dest": {
-    "index": "new_logs",
+    "index": "new-data-stream",
     "op_type": "create"
   }
 }
@@ -656,11 +658,11 @@ data stream, you can safely remove the old stream.
 +
 --
 The following <> request
-deletes the `logs` data stream. This request also deletes the stream's backing
-indices and any data they contain.
+deletes `my-data-stream`. This request also deletes the stream's
+backing indices and any data they contain.

 [source,console]
 ----
-DELETE /_data_stream/logs
+DELETE /_data_stream/my-data-stream
 ----
 --
diff --git a/docs/reference/data-streams/data-streams.asciidoc b/docs/reference/data-streams/data-streams.asciidoc
index a8150656cd2da..de4924c1ddb44 100644
--- a/docs/reference/data-streams/data-streams.asciidoc
+++ b/docs/reference/data-streams/data-streams.asciidoc
@@ -90,9 +90,9 @@ convention:
 .ds-<data-stream>-<generation>
 ----

-For example, the `web_server_logs` data stream has a generation of `34`. The
+For example, the `web-server-logs` data stream has a generation of `34`. The
 most recently created backing index for this data stream is named
-`.ds-web_server_logs-000034`.
+`.ds-web-server-logs-000034`.

 Because the generation increments with each new backing index, backing indices
 with a higher generation contain more recent data. Backing indices with a lower
diff --git a/docs/reference/data-streams/set-up-a-data-stream.asciidoc b/docs/reference/data-streams/set-up-a-data-stream.asciidoc
index 4cdeac3f96f1a..b515dd1e3635c 100644
--- a/docs/reference/data-streams/set-up-a-data-stream.asciidoc
+++ b/docs/reference/data-streams/set-up-a-data-stream.asciidoc
@@ -62,15 +62,15 @@ image::images/ilm/create-policy.png[Index Lifecycle Policies page]
 You can also create a policy using the <>.

-The following request configures the `logs_policy` lifecycle policy. The
-`logs_policy` policy uses the <> to create a
+The following request configures the `my-data-stream-policy` lifecycle policy.
+The policy uses the <> to create a
 new <> for the data stream when the current one
 reaches 25GB in size. The policy also deletes backing indices 30 days after
 their rollover.

 [source,console]
 ----
-PUT /_ilm/policy/logs_policy
+PUT /_ilm/policy/my-data-stream-policy
 {
   "policy": {
     "phases": {
@@ -136,19 +136,19 @@ template API>>.

 The template must include a `data_stream` object with an empty body
 (`{ }`). This object indicates the template is used exclusively for data
 streams.

-The following request configures the `logs_data_stream` index template. Because
-no field mapping is specified, the `@timestamp` field uses the `date` field data
-type by default.
+The following request configures the `my-data-stream-template` index template. +Because no field mapping is specified, the `@timestamp` field uses the `date` +field data type by default. [source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { }, "template": { "settings": { - "index.lifecycle.name": "logs_policy" + "index.lifecycle.name": "my-data-stream-policy" } } } @@ -159,9 +159,9 @@ Alternatively, the following template maps `@timestamp` as a `date_nanos` field. [source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { }, "template": { "mappings": { @@ -170,7 +170,7 @@ PUT /_index_template/logs_data_stream } }, "settings": { - "index.lifecycle.name": "logs_policy" + "index.lifecycle.name": "my-data-stream-policy" } } } @@ -207,14 +207,14 @@ uses the target name as the name for the stream. NOTE: Data streams support only specific types of indexing requests. See <>. -The following <> request targets `logs`, which matches -the wildcard pattern for the `logs_data_stream` template. Because no existing -index or data stream uses this name, this request creates the `logs` data stream -and indexes the document to it. +The following <> request targets `my-data-stream`, which +matches the wildcard pattern for `my-data-stream-template`. Because +no existing index or data stream uses this name, this request creates the +`my-data-stream` data stream and indexes the document to it. [source,console] ---- -POST /logs/_doc/ +POST /my-data-stream/_doc/ { "@timestamp": "2020-12-06T11:04:05.000Z", "user": { @@ -226,13 +226,13 @@ POST /logs/_doc/ // TEST[continued] The API returns the following response. Note the `_index` property contains -`.ds-logs-000001`, indicating the document was indexed to the write index of the -new `logs` data stream. +`.ds-my-data-stream-000001`, indicating the document was indexed to the write +index of the new data stream. [source,console-result] ---- { - "_index": ".ds-logs-000001", + "_index": ".ds-my-data-stream-000001", "_id": "qecQmXIBT4jB8tq1nG0j", "_version": 1, "result": "created", @@ -255,14 +255,14 @@ You can use the <> to manually create a data stream. The name of the data stream must match the name or wildcard pattern defined in the template's `index_patterns` property. -The following create data stream request -targets `logs_alt`, which matches the wildcard pattern for the -`logs_data_stream` template. Because no existing index or data stream uses this -name, this request creates the `logs_alt` data stream. +The following create data stream request targets `my-data-stream-alt`, which +matches the wildcard pattern for `my-data-stream-template`. Because +no existing index or data stream uses this name, this request creates the +`my-data-stream-alt` data stream. [source,console] ---- -PUT /_data_stream/logs_alt +PUT /_data_stream/my-data-stream-alt ---- // TEST[continued] @@ -288,50 +288,50 @@ the following information about one or more data streams: * The current {ilm-init} lifecycle policy in the stream's matching index template -The following get data stream API request retrieves information about the -`logs` data stream. +The following get data stream API request retrieves information about +`my-data-stream`. 
//// [source,console] ---- -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ ---- // TEST[continued] //// [source,console] ---- -GET /_data_stream/logs +GET /_data_stream/my-data-stream ---- // TEST[continued] The API returns the following response. Note the `indices` property contains an array of the stream's current backing indices. The last item in this array -contains information about the stream's write index, `.ds-logs-000002`. +contains information about the stream's write index, `.ds-my-data-stream-000002`. [source,console-result] ---- { "data_streams": [ { - "name": "logs", + "name": "my-data-stream", "timestamp_field": { "name": "@timestamp" }, "indices": [ { - "index_name": ".ds-logs-000001", + "index_name": ".ds-my-data-stream-000001", "index_uuid": "krR78LfvTOe6gr5dj2_1xQ" }, { - "index_name": ".ds-logs-000002", <1> + "index_name": ".ds-my-data-stream-000002", <1> "index_uuid": "C6LWyNJHQWmA08aQGvqRkA" } ], "generation": 2, "status": "GREEN", - "template": "logs_data_stream", - "ilm_policy": "logs_policy" + "template": "my-data-stream-template", + "ilm_policy": "my-data-stream-policy" } ] } @@ -340,8 +340,9 @@ contains information about the stream's write index, `.ds-logs-000002`. // TESTRESPONSE[s/"index_uuid": "C6LWyNJHQWmA08aQGvqRkA"/"index_uuid": $body.data_streams.0.indices.1.index_uuid/] // TESTRESPONSE[s/"status": "GREEN"/"status": "YELLOW"/] -<1> Last item in the `indices` array for the `logs` data stream. This item -contains information about the stream's current write index, `.ds-logs-000002`. +<1> Last item in the `indices` array for `my-data-stream`. This +item contains information about the stream's current write index, +`.ds-my-data-stream-000002`. [discrete] [[secure-a-data-stream]] @@ -364,12 +365,12 @@ image::images/data-streams/data-streams-list.png[Data Streams tab] You can also use the the <> to delete a data stream. The following delete data stream API request deletes -the `logs` data stream. This request also deletes the stream's backing indices -and any data they contain. +`my-data-stream`. This request also deletes the stream's backing +indices and any data they contain. 
[source,console] ---- -DELETE /_data_stream/logs +DELETE /_data_stream/my-data-stream ---- // TEST[continued] @@ -378,7 +379,7 @@ DELETE /_data_stream/logs ---- DELETE /_data_stream/* DELETE /_index_template/* -DELETE /_ilm/policy/logs_policy +DELETE /_ilm/policy/my-data-stream-policy ---- // TEST[continued] //// diff --git a/docs/reference/data-streams/use-a-data-stream.asciidoc b/docs/reference/data-streams/use-a-data-stream.asciidoc index d3ae30b471dd9..56c4780bf8143 100644 --- a/docs/reference/data-streams/use-a-data-stream.asciidoc +++ b/docs/reference/data-streams/use-a-data-stream.asciidoc @@ -18,19 +18,19 @@ the following: //// [source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { } } -PUT /_data_stream/logs +PUT /_data_stream/my-data-stream -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ -PUT /logs/_create/bfspvnIBr7VVZlfp2lqX?refresh=wait_for +PUT /my-data-stream/_create/bfspvnIBr7VVZlfp2lqX?refresh=wait_for { "@timestamp": "2020-12-07T11:06:07.000Z", "user": { @@ -39,7 +39,7 @@ PUT /logs/_create/bfspvnIBr7VVZlfp2lqX?refresh=wait_for "message": "Login successful" } -PUT /_data_stream/logs_alt +PUT /_data_stream/my-data-stream-alt ---- // TESTSETUP @@ -84,12 +84,11 @@ to a data stream. NOTE: The `op_type` parameter defaults to `create` when adding new documents. -The following index API request adds a new document to the `logs` data -stream. +The following index API request adds a new document to `my-data-stream`. [source,console] ---- -POST /logs/_doc/ +POST /my-data-stream/_doc/ { "@timestamp": "2020-12-07T11:06:07.000Z", "user": { @@ -115,11 +114,11 @@ stream in a single request. Each action in the bulk request must use the NOTE: Data streams do not support other bulk actions, such as `index`. The following bulk API request adds several new documents to -the `logs` data stream. Note that only the `create` action is used. +`my-data-stream`. Only the `create` action is used. [source,console] ---- -PUT /logs/_bulk?refresh +PUT /my-data-stream/_bulk?refresh {"create":{ }} { "@timestamp": "2020-12-08T11:04:05.000Z", "user": { "id": "vlb44hny" }, "message": "Login attempt failed" } {"create":{ }} @@ -156,7 +155,7 @@ PUT /_ingest/pipeline/lowercase_message_field ---- // TEST[continued] -The following index API request adds a new document to the `logs` data stream. +The following index API request adds a new document to `my-data-stream`. The request includes a `?pipeline=lowercase_message_field` query parameter. This parameter indicates {es} should use the `lowercase_message_field` pipeline @@ -167,7 +166,7 @@ During pre-processing, the pipeline changes the letter case of the document's [source,console] ---- -POST /logs/_doc?pipeline=lowercase_message_field +POST /my-data-stream/_doc?pipeline=lowercase_message_field { "@timestamp": "2020-12-08T11:12:01.000Z", "user": { @@ -199,13 +198,13 @@ The following search APIs support data streams: * <> * <> -The following <> request searches the `logs` data -stream for documents with a timestamp between today and yesterday that also have +The following <> request searches `my-data-stream` +for documents with a timestamp between today and yesterday that also have `message` value of `login successful`. 
[source,console] ---- -GET /logs/_search +GET /my-data-stream/_search { "query": { "bool": { @@ -230,12 +229,12 @@ GET /logs/_search You can use a comma-separated list or wildcard (`*`) expression to search multiple data streams, indices, and index aliases in the same request. -The following request searches the `logs` and `logs_alt` data streams, which are -specified as a comma-separated list in the request path. +The following request searches `my-data-stream` and `my-data-stream-alt`, +which are specified as a comma-separated list in the request path. [source,console] ---- -GET /logs,logs_alt/_search +GET /my-data-stream,my-data-stream-alt/_search { "query": { "match": { @@ -244,12 +243,12 @@ GET /logs,logs_alt/_search } } ---- -The following request uses the `logs*` wildcard expression to search any data -stream, index, or index alias beginning with `logs`. +The following request uses the `my-data-stream*` wildcard expression to search any data +stream, index, or index alias beginning with `my-data-stream`. [source,console] ---- -GET /logs*/_search +GET /my-data-stream*/_search { "query": { "match": { @@ -288,12 +287,12 @@ statistics for one or more data streams. These statistics include: .*Example* [%collapsible] ==== -The following data stream stats API request retrieves statistics for the -`logs` data stream. +The following data stream stats API request retrieves statistics for +`my-data-stream`. [source,console] ---- -GET /_data_stream/logs/_stats?human=true +GET /_data_stream/my-data-stream/_stats?human=true ---- The API returns the following response. @@ -312,7 +311,7 @@ The API returns the following response. "total_store_size_bytes": 624, "data_streams": [ { - "data_stream": "logs", + "data_stream": "my-data-stream", "backing_indices": 3, "store_size": "624b", "store_size_bytes": 624, @@ -346,11 +345,11 @@ manually perform a rollover. This can be useful if you want to to the stream's write index after updating a data stream's template. The following <> request submits a manual -rollover request for the `logs` data stream. +rollover request for `my-data-stream`. [source,console] ---- -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ ---- [discrete] @@ -370,50 +369,50 @@ You also can conveniently re-open all closed backing indices for a data stream by sending an open request directly to the stream. The following <> API request retrieves the status for -the `logs` data stream's backing indices. +`my-data-stream`'s backing indices. //// [source,console] ---- -POST /.ds-logs-000001,.ds-logs-000002/_close/ +POST /.ds-my-data-stream-000001,.ds-my-data-stream-000002/_close/ ---- //// [source,console] ---- -GET /_cat/indices/logs?v&s=index&h=index,status +GET /_cat/indices/my-data-stream?v&s=index&h=index,status ---- // TEST[continued] -The API returns the following response. The response indicates the `logs` data -stream contains two closed backing indices: `.ds-logs-000001` and -`.ds-logs-000002`. +The API returns the following response. The response indicates +`my-data-stream` contains two closed backing indices: +`.ds-my-data-stream-000001` and `.ds-my-data-stream-000002`. [source,txt] ---- -index status -.ds-logs-000001 close -.ds-logs-000002 close -.ds-logs-000003 open +index status +.ds-my-data-stream-000001 close +.ds-my-data-stream-000002 close +.ds-my-data-stream-000003 open ---- // TESTRESPONSE[non_json] The following <> request re-opens any closed -backing indices for the `logs` data stream, including `.ds-logs-000001` and -`.ds-logs-000002`. 
+backing indices for `my-data-stream`, including +`.ds-my-data-stream-000001` and `.ds-my-data-stream-000002`. [source,console] ---- -POST /logs/_open/ +POST /my-data-stream/_open/ ---- // TEST[continued] -You can resubmit the original cat indices API request to verify the -`.ds-logs-000001` and `.ds-logs-000002` backing indices were re-opened. +You can resubmit the original cat indices API request to verify +`.ds-my-data-stream-000001` and `.ds-my-data-stream-000002` were re-opened. [source,console] ---- -GET /_cat/indices/logs?v&s=index&h=index,status +GET /_cat/indices/my-data-stream?v&s=index&h=index,status ---- // TEST[continued] @@ -421,10 +420,10 @@ The API returns the following response. [source,txt] ---- -index status -.ds-logs-000001 open -.ds-logs-000002 open -.ds-logs-000003 open +index status +.ds-my-data-stream-000001 open +.ds-my-data-stream-000002 open +.ds-my-data-stream-000003 open ---- // TESTRESPONSE[non_json] @@ -461,8 +460,8 @@ write index, we recommend you update the <> and perform a <>. The following reindex request copies documents from the `archive` index alias to -the existing `logs` data stream. Because the destination is a data stream, the -request's `op_type` is `create`. +`my-data-stream`. Because the destination is a data +stream, the request's `op_type` is `create`. //// [source,console] @@ -495,7 +494,7 @@ POST /_reindex "index": "archive" }, "dest": { - "index": "logs", + "index": "my-data-stream", "op_type": "create" } } @@ -505,16 +504,16 @@ POST /_reindex You can also reindex documents from a data stream to an index, index alias, or data stream. -The following reindex request copies documents from the `logs` data stream -to the existing `archive` index alias. Because the destination is not a data -stream, the `op_type` does not need to be specified. +The following reindex request copies documents from `my-data-stream` +to the existing `archive` index alias. Because the destination is not a +data stream, the `op_type` does not need to be specified. [source,console] ---- POST /_reindex { "source": { - "index": "logs" + "index": "my-data-stream" }, "dest": { "index": "archive" @@ -540,14 +539,14 @@ data stream. These prohibited requests include: Instead, you can use the <> to update documents in a data stream that matches a provided query. -The following update by query request updates documents in the `logs` data -stream with a `user.id` of `l7gk7f82`. The request uses a +The following update by query request updates documents in `my-data-stream` +with a `user.id` of `l7gk7f82`. The request uses a <> to assign matching documents a new `user.id` value of `XgdX0NoX`. [source,console] ---- -POST /logs/_update_by_query +POST /my-data-stream/_update_by_query { "query": { "match": { @@ -577,12 +576,12 @@ prohibited requests include: Instead, you can use the <> to delete documents in a data stream that matches a provided query. -The following delete by query request deletes documents in the `logs` data -stream with a `user.id` of `vlb44hny`. +The following delete by query request deletes documents in `my-data-stream` +with a `user.id` of `vlb44hny`. [source,console] ---- -POST /logs/_delete_by_query +POST /my-data-stream/_delete_by_query { "query": { "match": { @@ -609,9 +608,9 @@ If you want to update a document, you must also get its current You can use a <> to retrieve this information. -The following search request retrieves documents in the `logs` data stream with -a `user.id` of `yWIumJd7`. 
By default, this search returns the document ID and -backing index for any matching documents. +The following search request retrieves documents in `my-data-stream` +with a `user.id` of `yWIumJd7`. By default, this search returns the +document ID and backing index for any matching documents. The request includes a `"seq_no_primary_term": true` argument. This means the search also returns the sequence number and primary term for any matching @@ -619,7 +618,7 @@ documents. [source,console] ---- -GET /logs/_search +GET /my-data-stream/_search { "seq_no_primary_term": true, "query": { @@ -652,7 +651,7 @@ information for any documents matching the search. "max_score": 0.2876821, "hits": [ { - "_index": ".ds-logs-000003", <1> + "_index": ".ds-my-data-stream-000003", <1> "_id": "bfspvnIBr7VVZlfp2lqX", <2> "_seq_no": 0, <3> "_primary_term": 1, <4> @@ -682,9 +681,9 @@ You can use an <> request to update an individual document. To prevent an accidental overwrite, this request must include valid `if_seq_no` and `if_primary_term` arguments. -The following index API request updates an existing document in the `logs` data -stream. The request targets document ID `bfspvnIBr7VVZlfp2lqX` in the -`.ds-logs-000003` backing index. +The following index API request updates an existing document in +`my-data-stream`. The request targets document ID +`bfspvnIBr7VVZlfp2lqX` in the `.ds-my-data-stream-000003` backing index. The request also includes the current sequence number and primary term in the respective `if_seq_no` and `if_primary_term` query parameters. The request body @@ -692,7 +691,7 @@ contains a new JSON source for the document. [source,console] ---- -PUT /.ds-logs-000003/_doc/bfspvnIBr7VVZlfp2lqX?if_seq_no=0&if_primary_term=1 +PUT /.ds-my-data-stream-000003/_doc/bfspvnIBr7VVZlfp2lqX?if_seq_no=0&if_primary_term=1 { "@timestamp": "2020-12-07T11:06:07.000Z", "user": { @@ -705,13 +704,13 @@ PUT /.ds-logs-000003/_doc/bfspvnIBr7VVZlfp2lqX?if_seq_no=0&if_primary_term=1 You use the <> to delete individual documents. Deletion requests do not require a sequence number or primary term. -The following index API request deletes an existing document in the `logs` data -stream. The request targets document ID `bfspvnIBr7VVZlfp2lqX` in the -`.ds-logs-000003` backing index. +The following index API request deletes an existing document in +`my-data-stream`. The request targets document ID +`bfspvnIBr7VVZlfp2lqX` in the `.ds-my-data-stream-000003` backing index. [source,console] ---- -DELETE /.ds-logs-000003/_doc/bfspvnIBr7VVZlfp2lqX +DELETE /.ds-my-data-stream-000003/_doc/bfspvnIBr7VVZlfp2lqX ---- You can use the <> to delete or update multiple documents in @@ -722,17 +721,17 @@ If the action type is `index`, the action must include valid arguments. The following bulk API request uses an `index` action to update an existing -document in the `logs` data stream. +document in `my-data-stream`. The `index` action targets document ID `bfspvnIBr7VVZlfp2lqX` in the -`.ds-logs-000003` backing index. The action also includes the current sequence -number and primary term in the respective `if_seq_no` and `if_primary_term` -parameters. +`.ds-my-data-stream-000003` backing index. The action also includes the current +sequence number and primary term in the respective `if_seq_no` and +`if_primary_term` parameters. 
 [source,console]
 ----
 PUT /_bulk?refresh
-{ "index": { "_index": ".ds-logs-000003", "_id": "bfspvnIBr7VVZlfp2lqX", "if_seq_no": 0, "if_primary_term": 1 } }
+{ "index": { "_index": ".ds-my-data-stream-000003", "_id": "bfspvnIBr7VVZlfp2lqX", "if_seq_no": 0, "if_primary_term": 1 } }
 { "@timestamp": "2020-12-07T11:06:07.000Z", "user": { "id": "8a4f500d" }, "message": "Login successful" }
 ----
diff --git a/docs/reference/how-to/search-speed.asciidoc b/docs/reference/how-to/search-speed.asciidoc
index d37869047bea2..c6a27b0c86a04 100644
--- a/docs/reference/how-to/search-speed.asciidoc
+++ b/docs/reference/how-to/search-speed.asciidoc
@@ -165,7 +165,7 @@ include::../mapping/types/numeric.asciidoc[tag=map-ids-as-keyword]
 === Avoid scripts

 If possible, avoid using <> or
-<> in searches. See
+<> in searches. See
 <>.
diff --git a/docs/reference/images/data-streams/data-streams-list.png b/docs/reference/images/data-streams/data-streams-list.png
index 8e182470dfd7051e82cf00b72354593248d228dd..38ec3727e49b8dce3720f1c65080a94dfbe917f7 100644
GIT binary patch
literal 55222
[binary image data for the updated data-streams-list.png screenshot omitted]
z_ioa6yVRxF6;(kzX$gai1Yg6yhvRS}M3_3#&P%=^+}^C%ablCM^e9kDH&bH49cyoj?vV(DSSNtMwe=j*TtA9~~!8Q9$f#QE-wB5aH)YR0{Hh8n^ zMba<4W7kdGWv`>@++@(BRmm!5$ZnBFY=|O+Hv3aeFwz<^+R5t?q> zf&dya^>?s^D4-o~9~Z&JksyWM`)<)E*h(4;J?*SvDf;?c!-IP#xWLP(w8w%kf@1Lb zu;@H?;ZmQB8iGQo)TE?Ww#ww@9ZeUQZ*9rP)ANP3;TOsDMHv)Z*XY53eo9mmp^#Pg1EBq+H>=tLj{1q?L_gbYAQ1R@cb z3xc2~5rT(Q67fTW`Zu8H6!<43)@sOVR*nn@P$Z~@Ki>UnXY1k6dp0gwPN&?JcvFZj zY8$v?RHtersl(79-!mhBpCMAGa&`tcQ#A6R+D(|fXM1VP`KgmPT7f51rmNw3^+iI+ zcV{T!#V!7ouAh#j(VJYGKwaA4gWF*Uoxv${71`7t*mF3)?-p?S+$D*xcD@kRM$1!W mmIA9Jeq*mTg6WALkUliu-b0qRVg7ynhmaOm5UUn32>4$%&KI5l diff --git a/docs/reference/indices/rollover-index.asciidoc b/docs/reference/indices/rollover-index.asciidoc index 296ca88055097..4b097d518d6b4 100644 --- a/docs/reference/indices/rollover-index.asciidoc +++ b/docs/reference/indices/rollover-index.asciidoc @@ -255,7 +255,7 @@ POST /my-data-stream/_rollover <2> <1> Creates a data stream called `my-data-stream` with one initial backing index named `my-data-stream-000001`. <2> This request creates a new backing index, `my-data-stream-000002`, and adds -it as the write index for the `my-data-stream` data stream if the current +it as the write index for `my-data-stream` if the current write index meets at least one of the following conditions: + -- diff --git a/docs/reference/mapping/types/nested.asciidoc b/docs/reference/mapping/types/nested.asciidoc index f948b38f185b4..b8b10ed6993f2 100644 --- a/docs/reference/mapping/types/nested.asciidoc +++ b/docs/reference/mapping/types/nested.asciidoc @@ -181,7 +181,7 @@ during the highlighting, these offsets will not be available during the main hig phase. Instead, highlighting needs to be performed via <>. The same consideration applies when loading fields during a search through <> or <>. +`docvalue_fields`>> or <>. ============================================= diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc index eb8e98b515aa6..3c1b0c98017c6 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-transform.asciidoc @@ -153,7 +153,7 @@ error count. The syntax for the `script_fields` property is identical to that used by {es}. For more information, see -{ref}/search-request-body.html#request-body-search-script-fields[Script fields]. +{ref}/search-fields.html#script-fields[Script fields]. You can preview the contents of the {dfeed} by using the following API: diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index 31005cfbf1844..5fd320689bceb 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -1229,7 +1229,7 @@ Specifies scripts that evaluate custom expressions and returns script fields to the {dfeed}. The detector configuration objects in a job can contain functions that use these script fields. For more information, see {ml-docs}/ml-configuring-transform.html[Transforming data with script fields] -and <>. +and <>. end::script-fields[] tag::scroll-size[] diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc index 98c64614a26dc..a735ce95f6d5b 100644 --- a/docs/reference/modules/cross-cluster-search.asciidoc +++ b/docs/reference/modules/cross-cluster-search.asciidoc @@ -315,7 +315,7 @@ By default, {es} reduces the number of network roundtrips between remote clusters. 
diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc
index 98c64614a26dc..a735ce95f6d5b 100644
--- a/docs/reference/modules/cross-cluster-search.asciidoc
+++ b/docs/reference/modules/cross-cluster-search.asciidoc
@@ -315,7 +315,7 @@ By default, {es} reduces the number of network roundtrips between remote
clusters. This reduces the impact of network delays on search speed. However,
{es} can't reduce network roundtrips for large search requests, such as those
including a <> or
-<>.
+<>.
+
See <> to learn how this option works.

diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc
index 8d5639bf33b91..872a3029f4e8f 100644
--- a/docs/reference/redirects.asciidoc
+++ b/docs/reference/redirects.asciidoc
@@ -85,7 +85,7 @@ See <>.
[role="exclude",id="search-request-inner-hits"]
=== Inner hits parameter for request body search API

-See <>.
+See <>.

[role="exclude",id="search-request-min-score"]
=== Minimum score parameter for request body search API
@@ -113,7 +113,7 @@ See <>.
[role="exclude",id="search-request-script-fields"]
=== Script fields parameter for request body search API

-See <>.
+See <>.

[role="exclude",id="search-request-scroll"]
=== Scroll parameter for request body search API
@@ -142,7 +142,7 @@ See <>.
[role="exclude",id="search-request-stored-fields"]
=== Stored fields parameter for request body search API

-See <>.
+See <>.

[role="exclude",id="search-request-track-total-hits"]
=== Track total hits parameter for request body search API
@@ -1014,12 +1014,21 @@ See <>.
[role="exclude",id="highlighter-internal-work"]
==== How highlighters work internally

+See <>.
+
+[role="exclude",id="request-body-search-inner-hits"]
+=== Inner hits
+See <>.
+
[role="exclude",id="request-body-search-queries-and-filters"]
=== Named queries

See <>.
-See <>.
+[role="exclude",id="request-body-search-script-fields"]
+==== Script fields
+
+See <>.

[role="exclude",id="request-body-search-scroll"]
==== Scroll
@@ -1037,7 +1046,7 @@ See <>.

See <>.

[role="exclude",id="request-body-search-search-after"]
-==== Search After
+==== Search after

See <>.

@@ -1050,4 +1059,9 @@ See <>.
==== Source filtering

See <>.
+
+[role="exclude",id="request-body-search-stored-fields"]
+==== Stored fields
+
+See <>.
////

diff --git a/docs/reference/scripting/fields.asciidoc b/docs/reference/scripting/fields.asciidoc
index 0e994d1048e54..e83b54c932882 100644
--- a/docs/reference/scripting/fields.asciidoc
+++ b/docs/reference/scripting/fields.asciidoc
@@ -19,7 +19,7 @@ API will have access to the `ctx` variable which exposes:
[discrete]
== Search and aggregation scripts

-With the exception of <> which are
+With the exception of <> which are
executed once per search hit, scripts used in search and aggregations will be
executed once for every document which might match a query or an aggregation.
Depending on how many documents you have, this could mean millions or billions
@@ -157,7 +157,7 @@ values are optimised for accessing the value of a specific field in many
documents.

It makes sense to use `_source` when generating a
-<> for the top ten hits from a
+<> for the top ten hits from a
search result but, for other search and aggregation use cases, always prefer
using doc values.
=========================================================

diff --git a/docs/reference/scripting/security.asciidoc b/docs/reference/scripting/security.asciidoc
index 385d0217686f3..426305df562aa 100644
--- a/docs/reference/scripting/security.asciidoc
+++ b/docs/reference/scripting/security.asciidoc
@@ -79,9 +79,15 @@ security of the Elasticsearch deployment.
=== Allowed script types setting

Elasticsearch supports two script types: `inline` and `stored` (<>).
-By default both types of scripts are allowed to be executed. To limit the types of scripts
-that can run, set `script.allowed_types` to `inline`, `stored`, or `none`. For example,
-to run `inline` scripts but not `stored` scripts, specify:
+By default, {es} is configured to run both types of scripts.
+To limit what types of scripts can run, set `script.allowed_types` to `inline` or `stored`.
+To prevent any scripts from running, set `script.allowed_types` to `none`.
+
+IMPORTANT: If you use {kib}, set `script.allowed_types` to `both` or `inline`.
+Some {kib} features rely on inline scripts and do not function as expected
+if {es} does not allow inline scripts.
+
+For example, to run `inline` scripts but not `stored` scripts, specify:

[source,yaml]
----
script.allowed_types: inline <1>
----
@@ -90,6 +96,7 @@
<1> This will allow only inline scripts to be executed but not stored scripts
(or any other types).
+
[[allowed-script-contexts-setting]]
[discrete]
=== Allowed script contexts setting
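The two settings introduced above can be combined in `elasticsearch.yml`. A minimal sketch; the `search` and `update` context names here are illustrative choices, not part of this patch:

[source,yaml]
----
script.allowed_types: inline <1>
script.allowed_contexts: search, update <2>
----
<1> Stored scripts are rejected; only inline scripts may run.
<2> Scripts may run only in the `search` and `update` contexts; every other
context is disabled.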
For example, -to run `inline` scripts but not `stored` scripts, specify: +By default, {es} is configured to run both types of scripts. +To limit what type of scripts are run, set `script.allowed_types` to `inline` or `stored`. +To prevent any scripts from running, set `script.allowed_types` to `none`. + +IMPORTANT: If you use {kib}, set `script.allowed_types` to `both` or `inline`. +Some {kib} features rely on inline scripts and do not function as expected +if {es} does not allow inline scripts. + +For example, to run `inline` scripts but not `stored` scripts, specify: [source,yaml] ---- @@ -90,6 +96,7 @@ script.allowed_types: inline <1> <1> This will allow only inline scripts to be executed but not stored scripts (or any other types). + [[allowed-script-contexts-setting]] [discrete] === Allowed script contexts setting diff --git a/docs/reference/scripting/using.asciidoc b/docs/reference/scripting/using.asciidoc index 79d0accc85bc9..b4c41913b42bc 100644 --- a/docs/reference/scripting/using.asciidoc +++ b/docs/reference/scripting/using.asciidoc @@ -18,7 +18,7 @@ the same pattern: <3> Any named parameters that should be passed into the script. For example, the following script is used in a search request to return a -<>: +<>: [source,console] ------------------------------------- diff --git a/docs/reference/search/request-body.asciidoc b/docs/reference/search/request-body.asciidoc index f56d6a49befee..98b06fede2bbb 100644 --- a/docs/reference/search/request-body.asciidoc +++ b/docs/reference/search/request-body.asciidoc @@ -125,7 +125,10 @@ See <>. include::request/index-boost.asciidoc[] -include::request/inner-hits.asciidoc[] +[[request-body-search-inner-hits]] +==== Inner hits + +See <>. include::request/min-score.asciidoc[] @@ -140,7 +143,10 @@ include::request/preference.asciidoc[] include::request/rescore.asciidoc[] -include::request/script-fields.asciidoc[] +[[request-body-search-script-fields]] +==== Script Fields + +See <>. [[request-body-search-scroll]] ==== Scroll @@ -174,6 +180,9 @@ See <>. See <>. -include::request/stored-fields.asciidoc[] +[[request-body-search-stored-fields]] +==== Stored fields + +See <>. include::request/track-total-hits.asciidoc[] diff --git a/docs/reference/search/request/collapse.asciidoc b/docs/reference/search/request/collapse.asciidoc index 2aea669938f29..4bf501042380a 100644 --- a/docs/reference/search/request/collapse.asciidoc +++ b/docs/reference/search/request/collapse.asciidoc @@ -72,7 +72,7 @@ GET /my-index-000001/_search <4> how to sort the document inside each group <5> the number of concurrent requests allowed to retrieve the `inner_hits` per group -See <> for the complete list of supported options and the format of the response. +See <> for the complete list of supported options and the format of the response. It is also possible to request multiple `inner_hits` for each collapsed hit. This can be useful when you want to get multiple representations of the collapsed hits. diff --git a/docs/reference/search/request/inner-hits.asciidoc b/docs/reference/search/request/inner-hits.asciidoc index f4077d24ebf8a..af984cd0c28de 100644 --- a/docs/reference/search/request/inner-hits.asciidoc +++ b/docs/reference/search/request/inner-hits.asciidoc @@ -1,5 +1,5 @@ -[[request-body-search-inner-hits]] -==== Inner hits +[[inner-hits]] +== Retrieve inner hits The <> and <> features allow the return of documents that have matches in a different scope. 
In the parent/child case, parent documents are returned based on matches in child @@ -54,7 +54,9 @@ If `inner_hits` is defined on a query that supports it then each search hit will -------------------------------------------------- // NOTCONSOLE -===== Options +[discrete] +[[inner-hits-options]] +=== Options Inner hits support the following options: @@ -72,13 +74,14 @@ Inner hits also supports the following per document features: * <> * <> * <> -* <> +* <> * <> * <> * <> +[discrete] [[nested-inner-hits]] -===== Nested inner hits +=== Nested inner hits The nested `inner_hits` can be used to include nested inner objects as inner hits to a search hit. @@ -193,8 +196,9 @@ An important default is that the `_source` returned in hits inside `inner_hits` So in the above example only the comment part is returned per nested hit and not the entire source of the top level document that contained the comment. +[discrete] [[nested-inner-hits-source]] -===== Nested inner hits and +_source+ +==== Nested inner hits and +_source+ Nested document don't have a `_source` field, because the entire source of document is stored with the root document under its `_source` field. To include the source of just the nested document, the source of the root document is parsed and just @@ -307,8 +311,9 @@ Response not included in text but tested for completeness sake. //// +[discrete] [[hierarchical-nested-inner-hits]] -===== Hierarchical levels of nested object fields and inner hits. +=== Hierarchical levels of nested object fields and inner hits. If a mapping has multiple levels of hierarchical nested object fields each level can be accessed via dot notated path. For example if there is a `comments` nested field that contains a `votes` nested field and votes should directly be returned @@ -426,8 +431,9 @@ Which would look like: This indirect referencing is only supported for nested inner hits. +[discrete] [[parent-child-inner-hits]] -===== Parent/child inner hits +=== Parent/child inner hits The parent/child `inner_hits` can be used to include parent or child: diff --git a/docs/reference/search/request/script-fields.asciidoc b/docs/reference/search/request/script-fields.asciidoc index a9452a2d22b31..4b1ed4c33e9d2 100644 --- a/docs/reference/search/request/script-fields.asciidoc +++ b/docs/reference/search/request/script-fields.asciidoc @@ -1,8 +1,9 @@ -[[request-body-search-script-fields]] -==== Script Fields +[discrete] +[[script-fields]] +=== Script fields -Allows to return a <> (based on different fields) for each hit, for example: +You can use the `script_fields` parameter to retrieve a <> (based on different fields) for each hit. For example: [source,console] -------------------------------------------------- diff --git a/docs/reference/search/request/stored-fields.asciidoc b/docs/reference/search/request/stored-fields.asciidoc index 1894a52fadee3..4d3f5ae1c45ff 100644 --- a/docs/reference/search/request/stored-fields.asciidoc +++ b/docs/reference/search/request/stored-fields.asciidoc @@ -1,7 +1,4 @@ -[[request-body-search-stored-fields]] -==== Stored Fields - -WARNING: The `stored_fields` parameter is about fields that are explicitly marked as +WARNING: The `stored_fields` parameter is for fields that are explicitly marked as stored in the mapping, which is off by default and generally not recommended. Use <> instead to select subsets of the original source document to be returned. @@ -45,9 +42,11 @@ Also only leaf fields can be returned via the `stored_fields` option. 
If an obje NOTE: On its own, `stored_fields` cannot be used to load fields in nested objects -- if a field contains a nested object in its path, then no data will be returned for that stored field. To access nested fields, `stored_fields` -must be used within an <> block. +must be used within an <> block. -===== Disable stored fields entirely +[discrete] +[[disable-stored-fields]] +==== Disable stored fields To disable the stored fields (and metadata fields) entirely use: `_none_`: diff --git a/docs/reference/search/search-fields.asciidoc b/docs/reference/search/search-fields.asciidoc index 64316711d0aa7..782205358a0aa 100644 --- a/docs/reference/search/search-fields.asciidoc +++ b/docs/reference/search/search-fields.asciidoc @@ -1,6 +1,8 @@ -[discrete] [[search-fields]] -=== Retrieve selected fields +== Retrieve selected fields from a search +++++ +Retrieve selected fields +++++ By default, each hit in the search response includes the document <>, which is the entire JSON object that was @@ -41,6 +43,11 @@ such as keywords and dates. get the values for specific stored fields (fields that use the <> mapping option). +If needed, you can use the <> parameter to +transform field values in the response using a script. However, scripts can’t +make use of {es}'s index structures or related optimizations. This can sometimes +result in slower search speeds. + You can find more detailed information on each of these methods in the following sections: @@ -48,6 +55,7 @@ following sections: * <> * <> * <> +* <> [discrete] [[search-fields-param]] @@ -211,7 +219,7 @@ GET my-index-000001/_search TIP: You cannot use the `docvalue_fields` parameter to retrieve doc values for nested objects. If you specify a nested object, the search returns an empty array (`[ ]`) for the field. To access nested fields, use the -<> parameter's `docvalue_fields` +<> parameter's `docvalue_fields` property. [discrete] @@ -220,8 +228,9 @@ property. It's also possible to store an individual field's values by using the <> mapping option. You can use the -<> parameter to include -these stored values in the search response. +`stored_fields` parameter to include these stored values in the search response. + +include::request/stored-fields.asciidoc[] [discrete] [[source-filtering]] @@ -308,3 +317,5 @@ GET /_search } } ---- + +include::request/script-fields.asciidoc[] \ No newline at end of file diff --git a/docs/reference/search/search-your-data.asciidoc b/docs/reference/search/search-your-data.asciidoc index a021e7e2eccaf..cd6883e6a5b43 100644 --- a/docs/reference/search/search-your-data.asciidoc +++ b/docs/reference/search/search-your-data.asciidoc @@ -70,6 +70,10 @@ The response sorts documents in `hits.hits` by `_score`, a <> that measures how well each document matches the query. +The `hit.hits` property also includes the <> for +each matching document. To retrieve only a subset of the `_source` or other +fields, see <>. 
+ [source,console-result] ---- { @@ -223,11 +227,12 @@ GET /*/_search ---- // TEST[setup:my_index] -include::search-fields.asciidoc[] include::request/collapse.asciidoc[] include::request/highlighting.asciidoc[] -include::paginate-search-results.asciidoc[] -include::request/sort.asciidoc[] include::{es-repo-dir}/async-search.asciidoc[] -include::{es-repo-dir}/modules/cross-cluster-search.asciidoc[] include::{es-repo-dir}/search/near-real-time.asciidoc[] +include::paginate-search-results.asciidoc[] +include::request/inner-hits.asciidoc[] +include::search-fields.asciidoc[] +include::{es-repo-dir}/modules/cross-cluster-search.asciidoc[] +include::request/sort.asciidoc[] diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index 6ff5b5c099676..8f907a7d20810 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -570,8 +570,8 @@ Contains field values for the documents. These fields must be specified in the request using one or more of the following request parameters: * <> -* <> -* <> +* <> +* <> This property is returned only if one or more of these parameters are set. -- diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index be1aee9632ac1..4a5e52c34c9e6 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -2,7 +2,7 @@ === Install {es} with Docker {es} is also available as Docker images. -The images use https://hub.docker.com/_/centos/[centos:7] as the base image. +The images use https://hub.docker.com/_/centos/[centos:8] as the base image. A list of all published Docker images and tags is available at https://www.docker.elastic.co[www.docker.elastic.co]. The source files @@ -199,6 +199,18 @@ sudo sysctl -w vm.max_map_count=262144 -------------------------------------------- -- +* Windows with https://docs.docker.com/docker-for-windows/wsl[Docker Desktop WSL 2 backend] ++ +-- +The `vm.max_map_count` setting must be set in the docker-desktop container: + +[source,sh] +-------------------------------------------- +wsl -d docker-desktop +sysctl -w vm.max_map_count=262144 +-------------------------------------------- +-- + ===== Configuration files must be readable by the `elasticsearch` user By default, {es} runs inside the container as user `elasticsearch` using @@ -232,7 +244,7 @@ To check the Docker daemon defaults for ulimits, run: [source,sh] -------------------------------------------- -docker run --rm centos:7 /bin/bash -c 'ulimit -Hn && ulimit -Sn && ulimit -Hu && ulimit -Su' +docker run --rm centos:8 /bin/bash -c 'ulimit -Hn && ulimit -Sn && ulimit -Hu && ulimit -Su' -------------------------------------------- If needed, adjust them in the Daemon or override them per container. 
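For example, a per-container override can be passed at run time. This is a minimal sketch; the `nofile` value below is illustrative, not a sizing recommendation:

[source,sh]
--------------------------------------------
docker run --rm --ulimit nofile=65535:65535 centos:8 /bin/bash -c 'ulimit -Hn && ulimit -Sn'
--------------------------------------------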
diff --git a/gradle/fips.gradle b/gradle/fips.gradle index b497393de5600..586b4f53c2405 100644 --- a/gradle/fips.gradle +++ b/gradle/fips.gradle @@ -5,12 +5,14 @@ import org.elasticsearch.gradle.testclusters.ElasticsearchCluster // Common config when running with a FIPS-140 runtime JVM if (BuildParams.inFipsJvm) { + + allprojects { File fipsResourcesDir = new File(project.buildDir, 'fips-resources') File fipsSecurity = new File(fipsResourcesDir, 'fips_java.security') File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks') - project.pluginManager.withPlugin('elasticsearch.java') { + pluginManager.withPlugin('elasticsearch.java') { TaskProvider fipsResourcesTask = project.tasks.register('fipsResources', ExportElasticsearchBuildResourcesTask) fipsResourcesTask.configure { outputDir = fipsResourcesDir @@ -18,17 +20,22 @@ if (BuildParams.inFipsJvm) { copy 'fips_java.policy' copy 'cacerts.bcfks' } - // This configuration can be removed once system modules are available - configurations.create('extraFipsJars') - dependencies { - extraFipsJars 'org.bouncycastle:bc-fips:1.0.1' - extraFipsJars 'org.bouncycastle:bctls-fips:1.0.9' - } + + pluginManager.withPlugin("elasticsearch.testclusters") { - testClusters.all { - for (File dep : project.configurations.extraFipsJars.files) { - extraJarFile dep + afterEvaluate { + // This afterEvaluate hooks is required to avoid deprecated configuration resolution + // This configuration can be removed once system modules are available + def extraFipsJars = configurations.detachedConfiguration(dependencies.create('org.bouncycastle:bc-fips:1.0.1'), + dependencies.create('org.bouncycastle:bctls-fips:1.0.9'), + ) + testClusters.all { + extraFipsJars.files.each { + extraJarFile it + } } + } + testClusters.all { extraConfigFile "fips_java.security", fipsSecurity extraConfigFile "fips_java.policy", fipsPolicy extraConfigFile "cacerts.bcfks", fipsTrustStore diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java index c6f33aa15db1d..4223931c4e32c 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java +++ b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregatorTests.java @@ -53,9 +53,9 @@ public void testNoData() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") .fields(Collections.singletonList("field")); - InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft); + InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); assertNull(stats.getStats()); - assertFalse(MatrixAggregationInspectionHelper.hasValue(stats)); + assertEquals(0L, stats.getDocCount()); } } } @@ -72,9 +72,9 @@ public void testUnmapped() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") .fields(Collections.singletonList("bogus")); - InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft); + InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft); 
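// Note: unlike the removed `search` helper, `searchAndReduce` also runs the final
// reduction, so the InternalMatrixStats asserted on below has already been fully
// reduced. That is why the doc-count assertion replaces the inspection-helper check.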
assertNull(stats.getStats()); - assertFalse(MatrixAggregationInspectionHelper.hasValue(stats)); + assertEquals(0L, stats.getDocCount()); } } } @@ -85,43 +85,6 @@ public void testTwoFields() throws Exception { String fieldB = "b"; MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE); - try (Directory directory = newDirectory(); - RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { - - int numDocs = scaledRandomIntBetween(8192, 16384); - Double[] fieldAValues = new Double[numDocs]; - Double[] fieldBValues = new Double[numDocs]; - for (int docId = 0; docId < numDocs; docId++) { - Document document = new Document(); - fieldAValues[docId] = randomDouble(); - document.add(new SortedNumericDocValuesField(fieldA, NumericUtils.doubleToSortableLong(fieldAValues[docId]))); - - fieldBValues[docId] = randomDouble(); - document.add(new SortedNumericDocValuesField(fieldB, NumericUtils.doubleToSortableLong(fieldBValues[docId]))); - indexWriter.addDocument(document); - } - - MultiPassStats multiPassStats = new MultiPassStats(fieldA, fieldB); - multiPassStats.computeStats(Arrays.asList(fieldAValues), Arrays.asList(fieldBValues)); - try (IndexReader reader = indexWriter.getReader()) { - IndexSearcher searcher = new IndexSearcher(reader); - MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") - .fields(Arrays.asList(fieldA, fieldB)); - InternalMatrixStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB); - // Since `search` doesn't do any reduction, and the InternalMatrixStats object will have a null `MatrixStatsResults` - // object. That is created during the final reduction, which also does a final round of computations - // So we have to create a MatrixStatsResults object here manually so that the final `compute()` is called - multiPassStats.assertNearlyEqual(new MatrixStatsResults(stats.getStats())); - } - } - } - - public void testTwoFieldsReduce() throws Exception { - String fieldA = "a"; - MappedFieldType ftA = new NumberFieldMapper.NumberFieldType(fieldA, NumberFieldMapper.NumberType.DOUBLE); - String fieldB = "b"; - MappedFieldType ftB = new NumberFieldMapper.NumberFieldType(fieldB, NumberFieldMapper.NumberType.DOUBLE); - try (Directory directory = newDirectory(); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -145,8 +108,6 @@ public void testTwoFieldsReduce() throws Exception { MatrixStatsAggregationBuilder aggBuilder = new MatrixStatsAggregationBuilder("my_agg") .fields(Arrays.asList(fieldA, fieldB)); InternalMatrixStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ftA, ftB); - // Unlike testTwoFields, `searchAndReduce` will execute reductions so the `MatrixStatsResults` object - // will be populated and fully computed. 
We should use that value directly to test against multiPassStats.assertNearlyEqual(stats); assertTrue(MatrixAggregationInspectionHelper.hasValue(stats)); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AccessNode.java similarity index 95% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AccessNode.java index 493dd69171c7f..21c3cb45cc18c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/BraceNode.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/AccessNode.java @@ -24,13 +24,13 @@ import org.elasticsearch.painless.phase.IRTreeVisitor; import org.elasticsearch.painless.symbol.WriteScope; -public class BraceNode extends BinaryNode { +public class AccessNode extends BinaryNode { /* ---- begin visitor ---- */ @Override public Output visit(IRTreeVisitor irTreeVisitor, Input input) { - return irTreeVisitor.visitBrace(this, input); + return irTreeVisitor.visitAccess(this, input); } /* ---- end visitor ---- */ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java deleted file mode 100644 index 2ebff20ddc513..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/CallNode.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.ir; - -import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.phase.IRTreeVisitor; -import org.elasticsearch.painless.symbol.WriteScope; - -public class CallNode extends BinaryNode { - - /* ---- begin visitor ---- */ - - @Override - public Output visit(IRTreeVisitor irTreeVisitor, Input input) { - return irTreeVisitor.visitCall(this, input); - } - - /* ---- end visitor ---- */ - - @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - getLeftNode().write(classWriter, methodWriter, writeScope); - getRightNode().write(classWriter, methodWriter, writeScope); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java deleted file mode 100644 index 544a08da4f4f0..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ir/DotNode.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.ir; - -import org.elasticsearch.painless.ClassWriter; -import org.elasticsearch.painless.MethodWriter; -import org.elasticsearch.painless.phase.IRTreeVisitor; -import org.elasticsearch.painless.symbol.WriteScope; - -public class DotNode extends BinaryNode { - - /* ---- begin visitor ---- */ - - @Override - public Output visit(IRTreeVisitor irTreeVisitor, Input input) { - return irTreeVisitor.visitDot(this, input); - } - - /* ---- end visitor ---- */ - - @Override - protected void write(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - getLeftNode().write(classWriter, methodWriter, writeScope); - getRightNode().write(classWriter, methodWriter, writeScope); - } - - @Override - protected int accessElementCount() { - return getRightNode().accessElementCount(); - } - - protected void setup(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - getLeftNode().write(classWriter, methodWriter, writeScope); - getRightNode().setup(classWriter, methodWriter, writeScope); - } - - @Override - protected void load(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - getRightNode().load(classWriter, methodWriter, writeScope); - } - - @Override - protected void store(ClassWriter classWriter, MethodWriter methodWriter, WriteScope writeScope) { - getRightNode().store(classWriter, methodWriter, writeScope); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECall.java index 38d66347f3bae..eafcddc1d3feb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECall.java @@ -20,9 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.phase.DefaultSemanticAnalysisPhase; import org.elasticsearch.painless.phase.UserTreeVisitor; -import org.elasticsearch.painless.symbol.SemanticScope; import java.util.Collections; import java.util.List; @@ -78,10 +76,4 @@ public void visitChildren(UserTreeVisitor userTreeVisitor, Scope argumentNode.visit(userTreeVisitor, scope); } } - - public static void visitDefaultSemanticAnalysis( - DefaultSemanticAnalysisPhase visitor, ECall userCallNode, SemanticScope semanticScope) { - - - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultUserTreeToIRTreePhase.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultUserTreeToIRTreePhase.java index 0379055b13dcb..ffd1bb1d5efb3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultUserTreeToIRTreePhase.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/DefaultUserTreeToIRTreePhase.java @@ -21,15 +21,14 @@ import org.elasticsearch.painless.DefBootstrap; import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.ir.AccessNode; import org.elasticsearch.painless.ir.AssignmentNode; import org.elasticsearch.painless.ir.BinaryMathNode; import org.elasticsearch.painless.ir.BlockNode; import org.elasticsearch.painless.ir.BooleanNode; -import org.elasticsearch.painless.ir.BraceNode; import org.elasticsearch.painless.ir.BraceSubDefNode; import org.elasticsearch.painless.ir.BraceSubNode; import org.elasticsearch.painless.ir.BreakNode; -import org.elasticsearch.painless.ir.CallNode; import org.elasticsearch.painless.ir.CallSubDefNode; import org.elasticsearch.painless.ir.CallSubNode; import org.elasticsearch.painless.ir.CastNode; @@ -44,7 +43,6 @@ import org.elasticsearch.painless.ir.DeclarationNode; import org.elasticsearch.painless.ir.DefInterfaceReferenceNode; import org.elasticsearch.painless.ir.DoWhileLoopNode; -import org.elasticsearch.painless.ir.DotNode; import org.elasticsearch.painless.ir.DotSubArrayLengthNode; import org.elasticsearch.painless.ir.DotSubDefNode; import org.elasticsearch.painless.ir.DotSubNode; @@ -209,64 +207,64 @@ protected void injectBootstrapMethod(ScriptScope scriptScope) { Location internalLocation = new Location("$internal$injectStaticFields", 0); int modifiers = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC; - FieldNode fieldNode = new FieldNode(); - fieldNode.setLocation(internalLocation); - fieldNode.setModifiers(modifiers); - fieldNode.setFieldType(PainlessLookup.class); - fieldNode.setName("$DEFINITION"); + FieldNode irFieldNode = new FieldNode(); + irFieldNode.setLocation(internalLocation); + irFieldNode.setModifiers(modifiers); + irFieldNode.setFieldType(PainlessLookup.class); + irFieldNode.setName("$DEFINITION"); - irClassNode.addFieldNode(fieldNode); + irClassNode.addFieldNode(irFieldNode); - fieldNode = new FieldNode(); - fieldNode.setLocation(internalLocation); - fieldNode.setModifiers(modifiers); - fieldNode.setFieldType(FunctionTable.class); - fieldNode.setName("$FUNCTIONS"); + irFieldNode = new FieldNode(); + irFieldNode.setLocation(internalLocation); + irFieldNode.setModifiers(modifiers); + irFieldNode.setFieldType(FunctionTable.class); + irFieldNode.setName("$FUNCTIONS"); - irClassNode.addFieldNode(fieldNode); + irClassNode.addFieldNode(irFieldNode); // adds the bootstrap method required for dynamic binding for def type resolution internalLocation = new Location("$internal$injectDefBootstrapMethod", 0); try { - FunctionNode functionNode = new FunctionNode(); - functionNode.setLocation(internalLocation); - functionNode.setReturnType(CallSite.class); - functionNode.setName("$bootstrapDef"); - functionNode.getTypeParameters().addAll( + FunctionNode irFunctionNode = new FunctionNode(); + irFunctionNode.setLocation(internalLocation); + irFunctionNode.setReturnType(CallSite.class); + irFunctionNode.setName("$bootstrapDef"); + irFunctionNode.getTypeParameters().addAll( Arrays.asList(Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class)); - functionNode.getParameterNames().addAll( + irFunctionNode.getParameterNames().addAll( Arrays.asList("methodHandlesLookup", "name", "type", "initialDepth", "flavor", "args")); - functionNode.setStatic(true); - functionNode.setVarArgs(true); - functionNode.setSynthetic(true); - functionNode.setMaxLoopCounter(0); + irFunctionNode.setStatic(true); + 
irFunctionNode.setVarArgs(true); + irFunctionNode.setSynthetic(true); + irFunctionNode.setMaxLoopCounter(0); - irClassNode.addFunctionNode(functionNode); + irClassNode.addFunctionNode(irFunctionNode); BlockNode blockNode = new BlockNode(); blockNode.setLocation(internalLocation); blockNode.setAllEscape(true); blockNode.setStatementCount(1); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(blockNode); ReturnNode returnNode = new ReturnNode(); returnNode.setLocation(internalLocation); blockNode.addStatementNode(returnNode); - CallNode callNode = new CallNode(); - callNode.setLocation(internalLocation); - callNode.setExpressionType(CallSite.class); + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLocation(internalLocation); + irAccessNode.setExpressionType(CallSite.class); - returnNode.setExpressionNode(callNode); + returnNode.setExpressionNode(irAccessNode); StaticNode staticNode = new StaticNode(); staticNode.setLocation(internalLocation); staticNode.setExpressionType(DefBootstrap.class); - callNode.setLeftNode(staticNode); + irAccessNode.setLeftNode(staticNode); CallSubNode callSubNode = new CallSubNode(); callSubNode.setLocation(internalLocation); @@ -299,7 +297,7 @@ protected void injectBootstrapMethod(ScriptScope scriptScope) { ); callSubNode.setBox(DefBootstrap.class); - callNode.setRightNode(callSubNode); + irAccessNode.setRightNode(callSubNode); MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); memberFieldLoadNode.setLocation(internalLocation); @@ -1041,17 +1039,17 @@ public void visitRegex(ERegex userRegexNode, ScriptScope scriptScope) { irStatementExpressionNode.setExpressionNode(irMemberFieldStoreNode); - CallNode irCallNode = new CallNode(); - irCallNode.setLocation(userRegexNode.getLocation()); - irCallNode.setExpressionType(Pattern.class); + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLocation(userRegexNode.getLocation()); + irAccessNode.setExpressionType(Pattern.class); - irMemberFieldStoreNode.setChildNode(irCallNode); + irMemberFieldStoreNode.setChildNode(irAccessNode); StaticNode irStaticNode = new StaticNode(); irStaticNode.setLocation(userRegexNode.getLocation()); irStaticNode.setExpressionType(Pattern.class); - irCallNode.setLeftNode(irStaticNode); + irAccessNode.setLeftNode(irStaticNode); CallSubNode irCallSubNode = new CallSubNode(); irCallSubNode.setLocation(userRegexNode.getLocation()); @@ -1068,7 +1066,7 @@ public void visitRegex(ERegex userRegexNode, ScriptScope scriptScope) { ) ); - irCallNode.setRightNode(irCallSubNode); + irAccessNode.setRightNode(irCallSubNode); ConstantNode irConstantNode = new ConstantNode(); irConstantNode.setLocation(userRegexNode.getLocation()); @@ -1352,12 +1350,12 @@ public void visitDot(EDot userDotNode, ScriptScope scriptScope) { irExpressionNode = irNullSafeSubNode; } - DotNode irDotNode = new DotNode(); - irDotNode.setLeftNode((ExpressionNode)visit(userDotNode.getPrefixNode(), scriptScope)); - irDotNode.setRightNode(irExpressionNode); - irDotNode.setLocation(irExpressionNode.getLocation()); - irDotNode.setExpressionType(irExpressionNode.getExpressionType()); - irExpressionNode = irDotNode; + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLeftNode((ExpressionNode)visit(userDotNode.getPrefixNode(), scriptScope)); + irAccessNode.setRightNode(irExpressionNode); + irAccessNode.setLocation(irExpressionNode.getLocation()); + irAccessNode.setExpressionType(irExpressionNode.getExpressionType()); + irExpressionNode = irAccessNode; } 
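// AccessNode is the consolidated replacement for the former DotNode (and, in the
// hunks below, for BraceNode and CallNode): dot, brace, and call accesses now
// share a single IR node type.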
scriptScope.putDecoration(userDotNode, new IRNodeDecoration(irExpressionNode)); @@ -1419,13 +1417,13 @@ public void visitBrace(EBrace userBraceNode, ScriptScope scriptScope) { throw userBraceNode.createError(new IllegalStateException("illegal tree structure")); } - BraceNode irBraceNode = new BraceNode(); - irBraceNode.setLeftNode((ExpressionNode)visit(userBraceNode.getPrefixNode(), scriptScope)); - irBraceNode.setRightNode(irExpressionNode); - irBraceNode.setLocation(irExpressionNode.getLocation()); - irBraceNode.setExpressionType(irExpressionNode.getExpressionType()); + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLeftNode((ExpressionNode)visit(userBraceNode.getPrefixNode(), scriptScope)); + irAccessNode.setRightNode(irExpressionNode); + irAccessNode.setLocation(irExpressionNode.getLocation()); + irAccessNode.setExpressionType(irExpressionNode.getExpressionType()); - scriptScope.putDecoration(userBraceNode, new IRNodeDecoration(irBraceNode)); + scriptScope.putDecoration(userBraceNode, new IRNodeDecoration(irAccessNode)); } @Override @@ -1475,12 +1473,12 @@ public void visitCall(ECall userCallNode, ScriptScope scriptScope) { irExpressionNode = irNullSafeSubNode; } - CallNode irCallNode = new CallNode(); - irCallNode.setLeftNode((ExpressionNode)visit(userCallNode.getPrefixNode(), scriptScope)); - irCallNode.setRightNode(irExpressionNode); - irCallNode.setLocation(irExpressionNode.getLocation()); - irCallNode.setExpressionType(irExpressionNode.getExpressionType()); + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLeftNode((ExpressionNode)visit(userCallNode.getPrefixNode(), scriptScope)); + irAccessNode.setRightNode(irExpressionNode); + irAccessNode.setLocation(irExpressionNode.getLocation()); + irAccessNode.setExpressionType(irExpressionNode.getExpressionType()); - scriptScope.putDecoration(userCallNode, new IRNodeDecoration(irCallNode)); + scriptScope.putDecoration(userCallNode, new IRNodeDecoration(irAccessNode)); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeBaseVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeBaseVisitor.java index 31c5a61136c06..ef9a89409e16a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeBaseVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeBaseVisitor.java @@ -19,15 +19,14 @@ package org.elasticsearch.painless.phase; +import org.elasticsearch.painless.ir.AccessNode; import org.elasticsearch.painless.ir.AssignmentNode; import org.elasticsearch.painless.ir.BinaryMathNode; import org.elasticsearch.painless.ir.BlockNode; import org.elasticsearch.painless.ir.BooleanNode; -import org.elasticsearch.painless.ir.BraceNode; import org.elasticsearch.painless.ir.BraceSubDefNode; import org.elasticsearch.painless.ir.BraceSubNode; import org.elasticsearch.painless.ir.BreakNode; -import org.elasticsearch.painless.ir.CallNode; import org.elasticsearch.painless.ir.CallSubDefNode; import org.elasticsearch.painless.ir.CallSubNode; import org.elasticsearch.painless.ir.CastNode; @@ -41,7 +40,6 @@ import org.elasticsearch.painless.ir.DeclarationNode; import org.elasticsearch.painless.ir.DefInterfaceReferenceNode; import org.elasticsearch.painless.ir.DoWhileLoopNode; -import org.elasticsearch.painless.ir.DotNode; import org.elasticsearch.painless.ir.DotSubArrayLengthNode; import org.elasticsearch.painless.ir.DotSubDefNode; import org.elasticsearch.painless.ir.DotSubNode; @@ -291,7 +289,7 
@@ public Output visitNullSafeSub(NullSafeSubNode irNullSafeSubNode, Input input) { } @Override - public Output visitDot(DotNode irDotNode, Input input) { + public Output visitAccess(AccessNode irAccessNode, Input input) { throw new UnsupportedOperationException(); } @@ -335,11 +333,6 @@ public Output visitMemberFieldStore(MemberFieldStoreNode irMemberFieldStoreNode, throw new UnsupportedOperationException(); } - @Override - public Output visitBrace(BraceNode irBraceNode, Input input) { - throw new UnsupportedOperationException(); - } - @Override public Output visitBraceSubDef(BraceSubDefNode irBraceSubDefNode, Input input) { throw new UnsupportedOperationException(); @@ -350,11 +343,6 @@ public Output visitBraceSub(BraceSubNode irBraceSubNode, Input input) { throw new UnsupportedOperationException(); } - @Override - public Output visitCall(CallNode irCallNode, Input input) { - throw new UnsupportedOperationException(); - } - @Override public Output visitCallSubDef(CallSubDefNode irCallSubDefNode, Input input) { throw new UnsupportedOperationException(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeVisitor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeVisitor.java index 8d99e7f1723d8..bad2fa5f82cc6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeVisitor.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/IRTreeVisitor.java @@ -19,15 +19,14 @@ package org.elasticsearch.painless.phase; +import org.elasticsearch.painless.ir.AccessNode; import org.elasticsearch.painless.ir.AssignmentNode; import org.elasticsearch.painless.ir.BinaryMathNode; import org.elasticsearch.painless.ir.BlockNode; import org.elasticsearch.painless.ir.BooleanNode; -import org.elasticsearch.painless.ir.BraceNode; import org.elasticsearch.painless.ir.BraceSubDefNode; import org.elasticsearch.painless.ir.BraceSubNode; import org.elasticsearch.painless.ir.BreakNode; -import org.elasticsearch.painless.ir.CallNode; import org.elasticsearch.painless.ir.CallSubDefNode; import org.elasticsearch.painless.ir.CallSubNode; import org.elasticsearch.painless.ir.CastNode; @@ -41,7 +40,6 @@ import org.elasticsearch.painless.ir.DeclarationNode; import org.elasticsearch.painless.ir.DefInterfaceReferenceNode; import org.elasticsearch.painless.ir.DoWhileLoopNode; -import org.elasticsearch.painless.ir.DotNode; import org.elasticsearch.painless.ir.DotSubArrayLengthNode; import org.elasticsearch.painless.ir.DotSubDefNode; import org.elasticsearch.painless.ir.DotSubNode; @@ -124,7 +122,7 @@ public interface IRTreeVisitor { Output visitStatic(StaticNode irStaticNode, Input input); Output visitVariable(VariableNode irVariableNode, Input input); Output visitNullSafeSub(NullSafeSubNode irNullSafeSubNode, Input input); - Output visitDot(DotNode irDotNode, Input input); + Output visitAccess(AccessNode irAccessNode, Input input); Output visitDotSubArrayLength(DotSubArrayLengthNode irDotSubArrayLengthNode, Input input); Output visitDotSubDef(DotSubDefNode irDotSubDefNode, Input input); Output visitDotSub(DotSubNode irDotSubNode, Input input); @@ -133,10 +131,8 @@ public interface IRTreeVisitor { Output visitMapSubShortcut(MapSubShortcutNode irMapSubShorcutNode, Input input); Output visitMemberFieldLoad(MemberFieldLoadNode irMemberFieldLoadNode, Input input); Output visitMemberFieldStore(MemberFieldStoreNode irMemberFieldStoreNode, Input input); - Output visitBrace(BraceNode irBraceNode, Input input); Output 
visitBraceSubDef(BraceSubDefNode irBraceSubDefNode, Input input); Output visitBraceSub(BraceSubNode irBraceSubNode, Input input); - Output visitCall(CallNode irCallNode, Input input); Output visitCallSubDef(CallSubDefNode irCallSubDefNode, Input input); Output visitCallSub(CallSubNode irCallSubNode, Input input); Output visitMemberCall(MemberCallNode irMemberCallNode, Input input); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java index 08f46f662fe69..0c044175737a9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/phase/PainlessUserTreeToIRTreePhase.java @@ -24,11 +24,10 @@ import org.elasticsearch.painless.PainlessExplainError; import org.elasticsearch.painless.ScriptClassInfo; import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; +import org.elasticsearch.painless.ir.AccessNode; import org.elasticsearch.painless.ir.BlockNode; -import org.elasticsearch.painless.ir.CallNode; import org.elasticsearch.painless.ir.CallSubNode; import org.elasticsearch.painless.ir.CatchNode; -import org.elasticsearch.painless.ir.ClassNode; import org.elasticsearch.painless.ir.ConstantNode; import org.elasticsearch.painless.ir.DeclarationNode; import org.elasticsearch.painless.ir.ExpressionNode; @@ -87,28 +86,28 @@ public void visitFunction(SFunction userFunctionNode, ScriptScope scriptScope) { irExpressionNode = null; } else { if (returnType.isPrimitive()) { - ConstantNode constantNode = new ConstantNode(); - constantNode.setLocation(userFunctionNode.getLocation()); - constantNode.setExpressionType(returnType); + ConstantNode irConstantNode = new ConstantNode(); + irConstantNode.setLocation(userFunctionNode.getLocation()); + irConstantNode.setExpressionType(returnType); if (returnType == boolean.class) { - constantNode.setConstant(false); + irConstantNode.setConstant(false); } else if (returnType == byte.class || returnType == char.class || returnType == short.class || returnType == int.class) { - constantNode.setConstant(0); + irConstantNode.setConstant(0); } else if (returnType == long.class) { - constantNode.setConstant(0L); + irConstantNode.setConstant(0L); } else if (returnType == float.class) { - constantNode.setConstant(0f); + irConstantNode.setConstant(0f); } else if (returnType == double.class) { - constantNode.setConstant(0d); + irConstantNode.setConstant(0d); } else { throw userFunctionNode.createError(new IllegalStateException("illegal tree structure")); } - irExpressionNode = constantNode; + irExpressionNode = irConstantNode; } else { irExpressionNode = new NullNode(); irExpressionNode.setLocation(userFunctionNode.getLocation()); @@ -141,7 +140,7 @@ public void visitFunction(SFunction userFunctionNode, ScriptScope scriptScope) { irFunctionNode.setSynthetic(false); irFunctionNode.setMaxLoopCounter(scriptScope.getCompilerSettings().getMaxLoopCounter()); - injectStaticFieldsAndGetters(irClassNode); + injectStaticFieldsAndGetters(); injectGetsDeclarations(irBlockNode, scriptScope); injectNeedsMethods(scriptScope); injectSandboxExceptions(irFunctionNode); @@ -153,133 +152,133 @@ public void visitFunction(SFunction userFunctionNode, ScriptScope scriptScope) { } // adds static fields and getter methods required by PainlessScript for exception handling - protected void 
injectStaticFieldsAndGetters(ClassNode classNode) { + protected void injectStaticFieldsAndGetters() { Location internalLocation = new Location("$internal$ScriptInjectionPhase$injectStaticFieldsAndGetters", 0); int modifiers = Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC; - FieldNode fieldNode = new FieldNode(); - fieldNode.setLocation(internalLocation); - fieldNode.setModifiers(modifiers); - fieldNode.setFieldType(String.class); - fieldNode.setName("$NAME"); + FieldNode irFieldNode = new FieldNode(); + irFieldNode.setLocation(internalLocation); + irFieldNode.setModifiers(modifiers); + irFieldNode.setFieldType(String.class); + irFieldNode.setName("$NAME"); - classNode.addFieldNode(fieldNode); + irClassNode.addFieldNode(irFieldNode); - fieldNode = new FieldNode(); - fieldNode.setLocation(internalLocation); - fieldNode.setModifiers(modifiers); - fieldNode.setFieldType(String.class); - fieldNode.setName("$SOURCE"); + irFieldNode = new FieldNode(); + irFieldNode.setLocation(internalLocation); + irFieldNode.setModifiers(modifiers); + irFieldNode.setFieldType(String.class); + irFieldNode.setName("$SOURCE"); - classNode.addFieldNode(fieldNode); + irClassNode.addFieldNode(irFieldNode); - fieldNode = new FieldNode(); - fieldNode.setLocation(internalLocation); - fieldNode.setModifiers(modifiers); - fieldNode.setFieldType(BitSet.class); - fieldNode.setName("$STATEMENTS"); + irFieldNode = new FieldNode(); + irFieldNode.setLocation(internalLocation); + irFieldNode.setModifiers(modifiers); + irFieldNode.setFieldType(BitSet.class); + irFieldNode.setName("$STATEMENTS"); - classNode.addFieldNode(fieldNode); + irClassNode.addFieldNode(irFieldNode); - FunctionNode functionNode = new FunctionNode(); - functionNode.setLocation(internalLocation); - functionNode.setName("getName"); - functionNode.setReturnType(String.class); - functionNode.setStatic(false); - functionNode.setVarArgs(false); - functionNode.setSynthetic(true); - functionNode.setMaxLoopCounter(0); + FunctionNode irFunctionNode = new FunctionNode(); + irFunctionNode.setLocation(internalLocation); + irFunctionNode.setName("getName"); + irFunctionNode.setReturnType(String.class); + irFunctionNode.setStatic(false); + irFunctionNode.setVarArgs(false); + irFunctionNode.setSynthetic(true); + irFunctionNode.setMaxLoopCounter(0); - classNode.addFunctionNode(functionNode); + irClassNode.addFunctionNode(irFunctionNode); - BlockNode blockNode = new BlockNode(); - blockNode.setLocation(internalLocation); - blockNode.setAllEscape(true); - blockNode.setStatementCount(1); + BlockNode irBlockNode = new BlockNode(); + irBlockNode.setLocation(internalLocation); + irBlockNode.setAllEscape(true); + irBlockNode.setStatementCount(1); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(irBlockNode); - ReturnNode returnNode = new ReturnNode(); - returnNode.setLocation(internalLocation); + ReturnNode irReturnNode = new ReturnNode(); + irReturnNode.setLocation(internalLocation); - blockNode.addStatementNode(returnNode); + irBlockNode.addStatementNode(irReturnNode); - MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); - memberFieldLoadNode.setLocation(internalLocation); - memberFieldLoadNode.setExpressionType(String.class); - memberFieldLoadNode.setName("$NAME"); - memberFieldLoadNode.setStatic(true); + MemberFieldLoadNode irMemberFieldLoadNode = new MemberFieldLoadNode(); + irMemberFieldLoadNode.setLocation(internalLocation); + irMemberFieldLoadNode.setExpressionType(String.class); + irMemberFieldLoadNode.setName("$NAME"); + 
irMemberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldLoadNode); + irReturnNode.setExpressionNode(irMemberFieldLoadNode); - functionNode = new FunctionNode(); - functionNode.setLocation(internalLocation); - functionNode.setName("getSource"); - functionNode.setReturnType(String.class); - functionNode.setStatic(false); - functionNode.setVarArgs(false); - functionNode.setSynthetic(true); - functionNode.setMaxLoopCounter(0); + irFunctionNode = new FunctionNode(); + irFunctionNode.setLocation(internalLocation); + irFunctionNode.setName("getSource"); + irFunctionNode.setReturnType(String.class); + irFunctionNode.setStatic(false); + irFunctionNode.setVarArgs(false); + irFunctionNode.setSynthetic(true); + irFunctionNode.setMaxLoopCounter(0); - classNode.addFunctionNode(functionNode); + irClassNode.addFunctionNode(irFunctionNode); - blockNode = new BlockNode(); - blockNode.setLocation(internalLocation); - blockNode.setAllEscape(true); - blockNode.setStatementCount(1); + irBlockNode = new BlockNode(); + irBlockNode.setLocation(internalLocation); + irBlockNode.setAllEscape(true); + irBlockNode.setStatementCount(1); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(irBlockNode); - returnNode = new ReturnNode(); - returnNode.setLocation(internalLocation); + irReturnNode = new ReturnNode(); + irReturnNode.setLocation(internalLocation); - blockNode.addStatementNode(returnNode); + irBlockNode.addStatementNode(irReturnNode); - memberFieldLoadNode = new MemberFieldLoadNode(); - memberFieldLoadNode.setLocation(internalLocation); - memberFieldLoadNode.setExpressionType(String.class); - memberFieldLoadNode.setName("$SOURCE"); - memberFieldLoadNode.setStatic(true); + irMemberFieldLoadNode = new MemberFieldLoadNode(); + irMemberFieldLoadNode.setLocation(internalLocation); + irMemberFieldLoadNode.setExpressionType(String.class); + irMemberFieldLoadNode.setName("$SOURCE"); + irMemberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldLoadNode); + irReturnNode.setExpressionNode(irMemberFieldLoadNode); - functionNode = new FunctionNode(); - functionNode.setLocation(internalLocation); - functionNode.setName("getStatements"); - functionNode.setReturnType(BitSet.class); - functionNode.setStatic(false); - functionNode.setVarArgs(false); - functionNode.setSynthetic(true); - functionNode.setMaxLoopCounter(0); + irFunctionNode = new FunctionNode(); + irFunctionNode.setLocation(internalLocation); + irFunctionNode.setName("getStatements"); + irFunctionNode.setReturnType(BitSet.class); + irFunctionNode.setStatic(false); + irFunctionNode.setVarArgs(false); + irFunctionNode.setSynthetic(true); + irFunctionNode.setMaxLoopCounter(0); - classNode.addFunctionNode(functionNode); + irClassNode.addFunctionNode(irFunctionNode); - blockNode = new BlockNode(); - blockNode.setLocation(internalLocation); - blockNode.setAllEscape(true); - blockNode.setStatementCount(1); + irBlockNode = new BlockNode(); + irBlockNode.setLocation(internalLocation); + irBlockNode.setAllEscape(true); + irBlockNode.setStatementCount(1); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(irBlockNode); - returnNode = new ReturnNode(); - returnNode.setLocation(internalLocation); + irReturnNode = new ReturnNode(); + irReturnNode.setLocation(internalLocation); - blockNode.addStatementNode(returnNode); + irBlockNode.addStatementNode(irReturnNode); - memberFieldLoadNode = new MemberFieldLoadNode(); - memberFieldLoadNode.setLocation(internalLocation); - 
memberFieldLoadNode.setExpressionType(BitSet.class); - memberFieldLoadNode.setName("$STATEMENTS"); - memberFieldLoadNode.setStatic(true); + irMemberFieldLoadNode = new MemberFieldLoadNode(); + irMemberFieldLoadNode.setLocation(internalLocation); + irMemberFieldLoadNode.setExpressionType(BitSet.class); + irMemberFieldLoadNode.setName("$STATEMENTS"); + irMemberFieldLoadNode.setStatic(true); - returnNode.setExpressionNode(memberFieldLoadNode); + irReturnNode.setExpressionNode(irMemberFieldLoadNode); } // convert gets methods to a new set of inserted ir nodes as necessary - // requires the gets method name be modified from "getExample" to "example" // if a get method variable isn't used it's declaration node is removed from // the ir tree permanently so there is no frivolous variable slotting - protected void injectGetsDeclarations(BlockNode blockNode, ScriptScope scriptScope) { + protected void injectGetsDeclarations(BlockNode irBlockNode, ScriptScope scriptScope) { Location internalLocation = new Location("$internal$ScriptInjectionPhase$injectGetsDeclarations", 0); for (int i = 0; i < scriptScope.getScriptClassInfo().getGetMethods().size(); ++i) { @@ -290,18 +289,18 @@ protected void injectGetsDeclarations(BlockNode blockNode, ScriptScope scriptSco if (scriptScope.getUsedVariables().contains(name)) { Class returnType = scriptScope.getScriptClassInfo().getGetReturns().get(i); - DeclarationNode declarationNode = new DeclarationNode(); - declarationNode.setLocation(internalLocation); - declarationNode.setName(name); - declarationNode.setDeclarationType(returnType); - blockNode.getStatementsNodes().add(0, declarationNode); + DeclarationNode irDeclarationNode = new DeclarationNode(); + irDeclarationNode.setLocation(internalLocation); + irDeclarationNode.setName(name); + irDeclarationNode.setDeclarationType(returnType); + irBlockNode.getStatementsNodes().add(0, irDeclarationNode); - MemberCallNode memberCallNode = new MemberCallNode(); - memberCallNode.setLocation(internalLocation); - memberCallNode.setExpressionType(declarationNode.getDeclarationType()); - memberCallNode.setLocalFunction(new LocalFunction( + MemberCallNode irMemberCallNode = new MemberCallNode(); + irMemberCallNode.setLocation(internalLocation); + irMemberCallNode.setExpressionType(irDeclarationNode.getDeclarationType()); + irMemberCallNode.setLocalFunction(new LocalFunction( getMethod.getName(), returnType, Collections.emptyList(), true, false)); - declarationNode.setExpressionNode(memberCallNode); + irDeclarationNode.setExpressionNode(irMemberCallNode); } } } @@ -315,35 +314,35 @@ protected void injectNeedsMethods(ScriptScope scriptScope) { name = name.substring(5); name = Character.toLowerCase(name.charAt(0)) + name.substring(1); - FunctionNode functionNode = new FunctionNode(); - functionNode.setLocation(internalLocation); - functionNode.setName(needsMethod.getName()); - functionNode.setReturnType(boolean.class); - functionNode.setStatic(false); - functionNode.setVarArgs(false); - functionNode.setSynthetic(true); - functionNode.setMaxLoopCounter(0); + FunctionNode irFunctionNode = new FunctionNode(); + irFunctionNode.setLocation(internalLocation); + irFunctionNode.setName(needsMethod.getName()); + irFunctionNode.setReturnType(boolean.class); + irFunctionNode.setStatic(false); + irFunctionNode.setVarArgs(false); + irFunctionNode.setSynthetic(true); + irFunctionNode.setMaxLoopCounter(0); - irClassNode.addFunctionNode(functionNode); + irClassNode.addFunctionNode(irFunctionNode); - BlockNode blockNode = new BlockNode(); - 
blockNode.setLocation(internalLocation); - blockNode.setAllEscape(true); - blockNode.setStatementCount(1); + BlockNode irBlockNode = new BlockNode(); + irBlockNode.setLocation(internalLocation); + irBlockNode.setAllEscape(true); + irBlockNode.setStatementCount(1); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(irBlockNode); - ReturnNode returnNode = new ReturnNode(); - returnNode.setLocation(internalLocation); + ReturnNode irReturnNode = new ReturnNode(); + irReturnNode.setLocation(internalLocation); - blockNode.addStatementNode(returnNode); + irBlockNode.addStatementNode(irReturnNode); - ConstantNode constantNode = new ConstantNode(); - constantNode.setLocation(internalLocation); - constantNode.setExpressionType(boolean.class); - constantNode.setConstant(scriptScope.getUsedVariables().contains(name)); + ConstantNode irConstantNode = new ConstantNode(); + irConstantNode.setLocation(internalLocation); + irConstantNode.setExpressionType(boolean.class); + irConstantNode.setConstant(scriptScope.getUsedVariables().contains(name)); - returnNode.setExpressionNode(constantNode); + irReturnNode.setExpressionNode(irConstantNode); } } @@ -356,38 +355,38 @@ protected void injectNeedsMethods(ScriptScope scriptScope) { // } catch (PainlessError | BootstrapMethodError | OutOfMemoryError | StackOverflowError | Exception e) { // throw this.convertToScriptException(e, e.getHeaders()) // } - protected void injectSandboxExceptions(FunctionNode functionNode) { + protected void injectSandboxExceptions(FunctionNode irFunctionNode) { try { Location internalLocation = new Location("$internal$ScriptInjectionPhase$injectSandboxExceptions", 0); - BlockNode blockNode = functionNode.getBlockNode(); + BlockNode irBlockNode = irFunctionNode.getBlockNode(); - TryNode tryNode = new TryNode(); - tryNode.setLocation(internalLocation); - tryNode.setBlockNode(blockNode); + TryNode irTryNode = new TryNode(); + irTryNode.setLocation(internalLocation); + irTryNode.setBlockNode(irBlockNode); - CatchNode catchNode = new CatchNode(); - catchNode.setLocation(internalLocation); - catchNode.setExceptionType(PainlessExplainError.class); - catchNode.setSymbol("#painlessExplainError"); + CatchNode irCatchNode = new CatchNode(); + irCatchNode.setLocation(internalLocation); + irCatchNode.setExceptionType(PainlessExplainError.class); + irCatchNode.setSymbol("#painlessExplainError"); - tryNode.addCatchNode(catchNode); + irTryNode.addCatchNode(irCatchNode); - BlockNode catchBlockNode = new BlockNode(); - catchBlockNode.setLocation(internalLocation); - catchBlockNode.setAllEscape(true); - catchBlockNode.setStatementCount(1); + BlockNode irCatchBlockNode = new BlockNode(); + irCatchBlockNode.setLocation(internalLocation); + irCatchBlockNode.setAllEscape(true); + irCatchBlockNode.setStatementCount(1); - catchNode.setBlockNode(catchBlockNode); + irCatchNode.setBlockNode(irCatchBlockNode); - ThrowNode throwNode = new ThrowNode(); - throwNode.setLocation(internalLocation); + ThrowNode irThrowNode = new ThrowNode(); + irThrowNode.setLocation(internalLocation); - catchBlockNode.addStatementNode(throwNode); + irCatchBlockNode.addStatementNode(irThrowNode); - MemberCallNode memberCallNode = new MemberCallNode(); - memberCallNode.setLocation(internalLocation); - memberCallNode.setExpressionType(ScriptException.class); - memberCallNode.setLocalFunction( + MemberCallNode irMemberCallNode = new MemberCallNode(); + irMemberCallNode.setLocation(internalLocation); + irMemberCallNode.setExpressionType(ScriptException.class); + 
irMemberCallNode.setLocalFunction( new LocalFunction( "convertToScriptException", ScriptException.class, @@ -397,33 +396,33 @@ protected void injectSandboxExceptions(FunctionNode functionNode) { ) ); - throwNode.setExpressionNode(memberCallNode); + irThrowNode.setExpressionNode(irMemberCallNode); - VariableNode variableNode = new VariableNode(); - variableNode.setLocation(internalLocation); - variableNode.setExpressionType(ScriptException.class); - variableNode.setName("#painlessExplainError"); + VariableNode irVariableNode = new VariableNode(); + irVariableNode.setLocation(internalLocation); + irVariableNode.setExpressionType(ScriptException.class); + irVariableNode.setName("#painlessExplainError"); - memberCallNode.addArgumentNode(variableNode); + irMemberCallNode.addArgumentNode(irVariableNode); - CallNode callNode = new CallNode(); - callNode.setLocation(internalLocation); - callNode.setExpressionType(Map.class); + AccessNode irAccessNode = new AccessNode(); + irAccessNode.setLocation(internalLocation); + irAccessNode.setExpressionType(Map.class); - memberCallNode.addArgumentNode(callNode); + irMemberCallNode.addArgumentNode(irAccessNode); - variableNode = new VariableNode(); - variableNode.setLocation(internalLocation); - variableNode.setExpressionType(PainlessExplainError.class); - variableNode.setName("#painlessExplainError"); + irVariableNode = new VariableNode(); + irVariableNode.setLocation(internalLocation); + irVariableNode.setExpressionType(PainlessExplainError.class); + irVariableNode.setName("#painlessExplainError"); - callNode.setLeftNode(variableNode); + irAccessNode.setLeftNode(irVariableNode); - CallSubNode callSubNode = new CallSubNode(); - callSubNode.setLocation(internalLocation); - callSubNode.setExpressionType(Map.class); - callSubNode.setBox(PainlessExplainError.class); - callSubNode.setMethod( + CallSubNode irCallSubNode = new CallSubNode(); + irCallSubNode.setLocation(internalLocation); + irCallSubNode.setExpressionType(Map.class); + irCallSubNode.setBox(PainlessExplainError.class); + irCallSubNode.setMethod( new PainlessMethod( PainlessExplainError.class.getMethod( "getHeaders", @@ -437,15 +436,15 @@ protected void injectSandboxExceptions(FunctionNode functionNode) { ) ); - callNode.setRightNode(callSubNode); + irAccessNode.setRightNode(irCallSubNode); - MemberFieldLoadNode memberFieldLoadNode = new MemberFieldLoadNode(); - memberFieldLoadNode.setLocation(internalLocation); - memberFieldLoadNode.setExpressionType(PainlessLookup.class); - memberFieldLoadNode.setName("$DEFINITION"); - memberFieldLoadNode.setStatic(true); + MemberFieldLoadNode irMemberFieldLoadNode = new MemberFieldLoadNode(); + irMemberFieldLoadNode.setLocation(internalLocation); + irMemberFieldLoadNode.setExpressionType(PainlessLookup.class); + irMemberFieldLoadNode.setName("$DEFINITION"); + irMemberFieldLoadNode.setStatic(true); - callSubNode.addArgumentNode(memberFieldLoadNode); + irCallSubNode.addArgumentNode(irMemberFieldLoadNode); for (Class throwable : new Class[] { PainlessError.class, BootstrapMethodError.class, OutOfMemoryError.class, StackOverflowError.class, Exception.class}) { @@ -453,29 +452,29 @@ protected void injectSandboxExceptions(FunctionNode functionNode) { String name = throwable.getSimpleName(); name = "#" + Character.toLowerCase(name.charAt(0)) + name.substring(1); - catchNode = new CatchNode(); - catchNode.setLocation(internalLocation); - catchNode.setExceptionType(throwable); - catchNode.setSymbol(name); + irCatchNode = new CatchNode(); + 
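The AccessNode used here models a qualified access as a left/right pair instead of separate call and dot nodes. In this method it encodes the second argument passed to convertToScriptException; in outline:

    // #painlessExplainError.getHeaders($DEFINITION)
    // AccessNode
    //   left  -> VariableNode   (#painlessExplainError, typed PainlessExplainError)
    //   right -> CallSubNode    (getHeaders, boxed by PainlessExplainError)
    //              argument: MemberFieldLoadNode ($DEFINITION, static PainlessLookup)
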
irCatchNode.setLocation(internalLocation); + irCatchNode.setExceptionType(throwable); + irCatchNode.setSymbol(name); - tryNode.addCatchNode(catchNode); + irTryNode.addCatchNode(irCatchNode); - catchBlockNode = new BlockNode(); - catchBlockNode.setLocation(internalLocation); - catchBlockNode.setAllEscape(true); - catchBlockNode.setStatementCount(1); + irCatchBlockNode = new BlockNode(); + irCatchBlockNode.setLocation(internalLocation); + irCatchBlockNode.setAllEscape(true); + irCatchBlockNode.setStatementCount(1); - catchNode.setBlockNode(catchBlockNode); + irCatchNode.setBlockNode(irCatchBlockNode); - throwNode = new ThrowNode(); - throwNode.setLocation(internalLocation); + irThrowNode = new ThrowNode(); + irThrowNode.setLocation(internalLocation); - catchBlockNode.addStatementNode(throwNode); + irCatchBlockNode.addStatementNode(irThrowNode); - memberCallNode = new MemberCallNode(); - memberCallNode.setLocation(internalLocation); - memberCallNode.setExpressionType(ScriptException.class); - memberCallNode.setLocalFunction( + irMemberCallNode = new MemberCallNode(); + irMemberCallNode.setLocation(internalLocation); + irMemberCallNode.setExpressionType(ScriptException.class); + irMemberCallNode.setLocalFunction( new LocalFunction( "convertToScriptException", ScriptException.class, @@ -485,32 +484,32 @@ protected void injectSandboxExceptions(FunctionNode functionNode) { ) ); - throwNode.setExpressionNode(memberCallNode); + irThrowNode.setExpressionNode(irMemberCallNode); - variableNode = new VariableNode(); - variableNode.setLocation(internalLocation); - variableNode.setExpressionType(ScriptException.class); - variableNode.setName(name); + irVariableNode = new VariableNode(); + irVariableNode.setLocation(internalLocation); + irVariableNode.setExpressionType(ScriptException.class); + irVariableNode.setName(name); - memberCallNode.addArgumentNode(variableNode); + irMemberCallNode.addArgumentNode(irVariableNode); - callNode = new CallNode(); - callNode.setLocation(internalLocation); - callNode.setExpressionType(Map.class); + irAccessNode = new AccessNode(); + irAccessNode.setLocation(internalLocation); + irAccessNode.setExpressionType(Map.class); - memberCallNode.addArgumentNode(callNode); + irMemberCallNode.addArgumentNode(irAccessNode); - StaticNode staticNode = new StaticNode(); - staticNode.setLocation(internalLocation); - staticNode.setExpressionType(Collections.class); + StaticNode irStaticNode = new StaticNode(); + irStaticNode.setLocation(internalLocation); + irStaticNode.setExpressionType(Collections.class); - callNode.setLeftNode(staticNode); + irAccessNode.setLeftNode(irStaticNode); - callSubNode = new CallSubNode(); - callSubNode.setLocation(internalLocation); - callSubNode.setExpressionType(Map.class); - callSubNode.setBox(Collections.class); - callSubNode.setMethod( + irCallSubNode = new CallSubNode(); + irCallSubNode.setLocation(internalLocation); + irCallSubNode.setExpressionType(Map.class); + irCallSubNode.setBox(Collections.class); + irCallSubNode.setMethod( new PainlessMethod( Collections.class.getMethod("emptyMap"), Collections.class, @@ -522,16 +521,16 @@ protected void injectSandboxExceptions(FunctionNode functionNode) { ) ); - callNode.setRightNode(callSubNode); + irAccessNode.setRightNode(irCallSubNode); } - blockNode = new BlockNode(); - blockNode.setLocation(blockNode.getLocation()); - blockNode.setAllEscape(blockNode.doAllEscape()); - blockNode.setStatementCount(blockNode.getStatementCount()); - blockNode.addStatementNode(tryNode); + irBlockNode = new BlockNode(); + 
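The loop above emits one further catch clause per sandboxed throwable, each rethrowing through convertToScriptException with empty headers. Written out as source, the fully injected wrapper comes out roughly as the comment at the top of injectSandboxExceptions promises:

    try {
        // original script body
    } catch (PainlessExplainError e) {
        throw this.convertToScriptException(e, e.getHeaders($DEFINITION));
    } catch (PainlessError e) {
        throw this.convertToScriptException(e, Collections.emptyMap());
    }
    // ... and likewise for BootstrapMethodError, OutOfMemoryError,
    // StackOverflowError and Exception.
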
irBlockNode.setLocation(irBlockNode.getLocation()); + irBlockNode.setAllEscape(irBlockNode.doAllEscape()); + irBlockNode.setStatementCount(irBlockNode.getStatementCount()); + irBlockNode.addStatementNode(irTryNode); - functionNode.setBlockNode(blockNode); + irFunctionNode.setBlockNode(irBlockNode); } catch (Exception exception) { throw new RuntimeException(exception); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 3a4a263a84b76..af4a0e0bc9530 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -303,7 +303,7 @@ private void testCase(Query query, IndexSearcher indexSearcher, Consumer tasks.register('packagingTest') { - dependsOn 'distroTest', 'batsTest.upgrade' + dependsOn 'distroTest' } vagrant { diff --git a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index 626e0bb9e1144..c42806140fedc 100644 --- a/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/os/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -37,7 +37,6 @@ public static void filterDistros() { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/58730") public void test05CheckLintian() { String extraArgs = ""; if (sh.run("lintian --help").stdout.contains("fail-on-warnings")) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java index ad74d90006731..9e2b56e97dcc5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/MockDiskUsagesIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.monitor.fs.FsInfo; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.nio.file.Path; import java.util.Arrays; @@ -78,6 +79,9 @@ private static FsInfo.Path setDiskUsage(FsInfo.Path original, long totalBytes, l return new FsInfo.Path(original.getPath(), original.getMount(), totalBytes, freeBytes, freeBytes); } + @TestLogging(reason="https://github.com/elastic/elasticsearch/issues/60587", + value="org.elasticsearch.cluster.InternalClusterInfoService:TRACE," + + "org.elasticsearch.cluster.routing.allocation.DiskThresholdMonitor:TRACE") public void testRerouteOccursOnDiskPassingHighWatermark() throws Exception { for (int i = 0; i < 3; i++) { // ensure that each node has a single data path diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 95547df46454c..2afe252c79a5d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -39,6 +39,7 @@ import 
org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.time.DateMathParser; import org.elasticsearch.common.time.DateUtils; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.LocaleUtils; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; @@ -94,6 +95,11 @@ public Instant clampToValidRange(Instant instant) { public long parsePointAsMillis(byte[] value) { return LongPoint.decodeDimension(value, 0); } + + @Override + protected Query distanceFeatureQuery(String field, float boost, long origin, TimeValue pivot) { + return LongPoint.newDistanceFeatureQuery(field, boost, origin, pivot.getMillis()); + } }, NANOSECONDS(DATE_NANOS_CONTENT_TYPE, NumericType.DATE_NANOSECONDS) { @Override @@ -115,6 +121,11 @@ public Instant clampToValidRange(Instant instant) { public long parsePointAsMillis(byte[] value) { return DateUtils.toMilliSeconds(LongPoint.decodeDimension(value, 0)); } + + @Override + protected Query distanceFeatureQuery(String field, float boost, long origin, TimeValue pivot) { + return LongPoint.newDistanceFeatureQuery(field, boost, origin, pivot.getNanos()); + } }; private final String type; @@ -162,6 +173,8 @@ public static Resolution ofOrdinal(int ord) { } throw new IllegalArgumentException("unknown resolution ordinal [" + ord + "]"); } + + protected abstract Query distanceFeatureQuery(String field, float boost, long origin, TimeValue pivot); } private static DateFieldMapper toType(FieldMapper in) { @@ -385,6 +398,13 @@ public static long parseToLong( return resolution.convert(dateParser.parse(BytesRefs.toString(value), now, roundUp, zone)); } + @Override + public Query distanceFeatureQuery(Object origin, String pivot, float boost, QueryShardContext context) { + long originLong = parseToLong(origin, true, null, null, context::nowInMillis); + TimeValue pivotTime = TimeValue.parseTimeValue(pivot, "distance_feature.pivot"); + return resolution.distanceFeatureQuery(name(), boost, originLong, pivotTime); + } + @Override public Relation isFieldWithinQuery(IndexReader reader, Object from, Object to, boolean includeLower, boolean includeUpper, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 30b8999e10c99..fa6b510ccd0aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -24,15 +24,18 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.geometry.Point; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointIndexFieldData; import org.elasticsearch.index.mapper.GeoPointFieldMapper.ParsedGeoPoint; +import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import 
org.elasticsearch.search.lookup.SearchLookup; @@ -188,6 +191,20 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S return new AbstractLatLonPointIndexFieldData.Builder(name(), CoreValuesSourceType.GEOPOINT); } + @Override + public Query distanceFeatureQuery(Object origin, String pivot, float boost, QueryShardContext context) { + GeoPoint originGeoPoint; + if (origin instanceof GeoPoint) { + originGeoPoint = (GeoPoint) origin; + } else if (origin instanceof String) { + originGeoPoint = GeoUtils.parseFromString((String) origin); + } else { + throw new IllegalArgumentException("Illegal type ["+ origin.getClass() + "] for [origin]! " + + "Must be of type [geo_point] or [string] for geo_point fields!"); + } + double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT); + return LatLonPoint.newDistanceFeatureQuery(name(), boost, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble); + } } // Eclipse requires the AbstractPointGeometryFieldMapper prefix or it can't find ParsedPoint diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index e4cfa9ffe4870..de8ef07c75fb3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -43,6 +43,7 @@ import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.query.DistanceFeatureQueryBuilder; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardException; @@ -252,6 +253,11 @@ public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRew + "] which is of type [" + typeName() + "]"); } + public Query distanceFeatureQuery(Object origin, String pivot, float boost, QueryShardContext context) { + throw new IllegalArgumentException("Illegal data type of [" + typeName() + "]!"+ + "[" + DistanceFeatureQueryBuilder.NAME + "] query can only be run on a date, date_nanos or geo_point field type!"); + } + /** * Create an {@link IntervalsSource} to be used for proximity queries */ diff --git a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java index e06a3201e7c11..9001709460511 100644 --- a/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/DistanceFeatureQueryBuilder.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.query; -import org.apache.lucene.document.LatLonPoint; -import org.apache.lucene.document.LongPoint; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -29,15 +27,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.DateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -119,31 +112,7 @@ protected Query doToQuery(QueryShardContext context) throws IOException { if (fieldType == null) { return Queries.newMatchNoDocsQuery("Can't run [" + NAME + "] query on unmapped fields!"); } - Object originObj = origin.origin(); - // TODO these ain't gonna work with runtime fields - if (fieldType instanceof DateFieldType) { - long originLong = ((DateFieldType) fieldType).parseToLong(originObj, true, null, null, context::nowInMillis); - TimeValue pivotVal = TimeValue.parseTimeValue(pivot, DistanceFeatureQueryBuilder.class.getSimpleName() + ".pivot"); - if (((DateFieldType) fieldType).resolution() == DateFieldMapper.Resolution.MILLISECONDS) { - return LongPoint.newDistanceFeatureQuery(field, boost, originLong, pivotVal.getMillis()); - } else { // NANOSECONDS - return LongPoint.newDistanceFeatureQuery(field, boost, originLong, pivotVal.getNanos()); - } - } else if (fieldType instanceof GeoPointFieldType) { - GeoPoint originGeoPoint; - if (originObj instanceof GeoPoint) { - originGeoPoint = (GeoPoint) originObj; - } else if (originObj instanceof String) { - originGeoPoint = GeoUtils.parseFromString((String) originObj); - } else { - throw new IllegalArgumentException("Illegal type ["+ origin.getClass() + "] for [origin]! " + - "Must be of type [geo_point] or [string] for geo_point fields!"); - } - double pivotDouble = DistanceUnit.DEFAULT.parse(pivot, DistanceUnit.DEFAULT); - return LatLonPoint.newDistanceFeatureQuery(field, boost, originGeoPoint.lat(), originGeoPoint.lon(), pivotDouble); - } - throw new IllegalArgumentException("Illegal data type of [" + fieldType.typeName() + "]!"+ - "[" + NAME + "] query can only be run on a date, date_nanos or geo_point field type!"); + return fieldType.distanceFeatureQuery(origin.origin(), pivot, boost, context); } String fieldName() { diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java index b3c2a7c729e00..18dd2209f7a1a 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java @@ -114,13 +114,13 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal; import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.histogram.VariableWidthHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram; -import org.elasticsearch.search.aggregations.bucket.histogram.InternalVariableWidthHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalVariableWidthHistogram; +import 
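With doToQuery reduced to a single polymorphic call, each field type now owns its own query construction. For the millisecond date resolution the end product is a plain Lucene distance feature query; a minimal sketch (field name and values are illustrative):

    import org.apache.lucene.document.LongPoint;
    import org.apache.lucene.search.Query;
    import org.elasticsearch.common.unit.TimeValue;

    // What DateFieldMapper's MILLISECONDS resolution ultimately builds:
    Query query = LongPoint.newDistanceFeatureQuery(
        "timestamp",                               // field
        2.0f,                                      // boost
        1596700800000L,                            // origin as epoch millis
        TimeValue.timeValueHours(1).getMillis());  // pivot distance
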
org.elasticsearch.search.aggregations.bucket.histogram.VariableWidthHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing; import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.nested.InternalNested; @@ -468,8 +468,12 @@ private ValuesSourceRegistry registerAggregations(List plugins) { .setAggregatorRegistrar(GeoCentroidAggregationBuilder::registerAggregators), builder); registerAggregation(new AggregationSpec(ScriptedMetricAggregationBuilder.NAME, ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder.PARSER).addResultReader(InternalScriptedMetric::new), builder); - registerAggregation((new AggregationSpec(CompositeAggregationBuilder.NAME, CompositeAggregationBuilder::new, - CompositeAggregationBuilder.PARSER).addResultReader(InternalComposite::new)), builder); + registerAggregation( + new AggregationSpec(CompositeAggregationBuilder.NAME, CompositeAggregationBuilder::new, CompositeAggregationBuilder.PARSER) + .addResultReader(InternalComposite::new) + .setAggregatorRegistrar(CompositeAggregationBuilder::registerAggregators), + builder + ); registerFromPlugin(plugins, SearchPlugin::getAggregations, (agg) -> this.registerAggregation(agg, builder)); // after aggs have been registered, see if there are any new VSTypes that need to be linked to core fields diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index e3c89999d85c4..d92c24793d16a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import java.io.IOException; import java.util.ArrayList; @@ -61,6 +62,14 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder p.map(), AFTER_FIELD_NAME); } + public static void registerAggregators(ValuesSourceRegistry.Builder builder) { + DateHistogramValuesSourceBuilder.register(builder); + HistogramValuesSourceBuilder.register(builder); + GeoTileGridValuesSourceBuilder.register(builder); + TermsValuesSourceBuilder.register(builder); + builder.registerUsage(NAME); + } + private List> sources; private Map after; private int size = 10; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2f9762f426b10..ecb39acfb40d2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -19,9 +19,7 @@ package org.elasticsearch.search.aggregations.bucket.composite; -import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import 
org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; @@ -44,21 +42,18 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.RoaringDocIdSet; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSortConfig; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.BucketCollector; +import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.MultiBucketCollector; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; -import org.elasticsearch.search.aggregations.bucket.geogrid.CellIdSource; -import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.searchafter.SearchAfterBuilder; import org.elasticsearch.search.sort.SortAndFormats; @@ -110,7 +105,12 @@ final class CompositeAggregator extends BucketsAggregator { } this.sourceConfigs = sourceConfigs; for (int i = 0; i < sourceConfigs.length; i++) { - this.sources[i] = createValuesSource(context.bigArrays(), context.searcher().getIndexReader(), sourceConfigs[i], size); + this.sources[i] = sourceConfigs[i].createValuesSource( + context.bigArrays(), + context.searcher().getIndexReader(), + size, + this::addRequestCircuitBreakerBytes + ); } this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey); this.rawAfterKey = rawAfterKey; @@ -495,81 +495,6 @@ public void collect(int doc, long zeroBucket) throws IOException { }; } - private SingleDimensionValuesSource createValuesSource(BigArrays bigArrays, IndexReader reader, - CompositeValuesSourceConfig config, int size) { - final int reverseMul = config.reverseMul(); - if (config.valuesSource() instanceof ValuesSource.Bytes.WithOrdinals && reader instanceof DirectoryReader) { - ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) config.valuesSource(); - return new GlobalOrdinalValuesSource( - bigArrays, - config.fieldType(), - vs::globalOrdinalsValues, - config.format(), - config.missingBucket(), - size, - reverseMul - ); - } else if (config.valuesSource() instanceof ValuesSource.Bytes) { - ValuesSource.Bytes vs = (ValuesSource.Bytes) config.valuesSource(); - return new BinaryValuesSource( - bigArrays, - this::addRequestCircuitBreakerBytes, - config.fieldType(), - vs::bytesValues, - config.format(), - config.missingBucket(), - size, - reverseMul - ); - - } else if (config.valuesSource() instanceof CellIdSource) { - final CellIdSource cis = (CellIdSource) config.valuesSource(); - return new GeoTileValuesSource( - bigArrays, - config.fieldType(), - cis::longValues, - LongUnaryOperator.identity(), - config.format(), - config.missingBucket(), - size, - reverseMul); - } else if (config.valuesSource() instanceof ValuesSource.Numeric) { - final ValuesSource.Numeric vs = (ValuesSource.Numeric) config.valuesSource(); - if (vs.isFloatingPoint()) { - return new 
DoubleValuesSource( - bigArrays, - config.fieldType(), - vs::doubleValues, - config.format(), - config.missingBucket(), - size, - reverseMul - ); - - } else { - final LongUnaryOperator rounding; - if (vs instanceof RoundingValuesSource) { - rounding = ((RoundingValuesSource) vs)::round; - } else { - rounding = LongUnaryOperator.identity(); - } - return new LongValuesSource( - bigArrays, - config.fieldType(), - vs::longValues, - rounding, - config.format(), - config.missingBucket(), - size, - reverseMul - ); - } - } else { - throw new IllegalArgumentException("Unknown values source type: " + config.valuesSource().getClass().getName() + - " for source: " + config.name()); - } - } - private static class Entry { final LeafReaderContext context; final DocIdSet docIdSet; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java index 86acc021cda04..f8694b4f22326 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceBuilder.java @@ -26,10 +26,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.script.Script; -import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; @@ -44,18 +44,13 @@ public abstract class CompositeValuesSourceBuilder createValuesSource( + BigArrays bigArrays, + IndexReader reader, + int size, + LongConsumer addRequestCircuitBreakerBytes, + CompositeValuesSourceConfig config + ); + } + private final String name; @Nullable private final MappedFieldType fieldType; @@ -34,6 +50,7 @@ class CompositeValuesSourceConfig { private final int reverseMul; private final boolean missingBucket; private final boolean hasScript; + private final SingleDimensionValuesSourceProvider singleDimensionValuesSourceProvider; /** * Creates a new {@link CompositeValuesSourceConfig}. @@ -46,8 +63,16 @@ class CompositeValuesSourceConfig { * @param missingBucket If true an explicit null bucket will represent documents with missing values. * @param hasScript true if the source contains a script that can change the value. */ - CompositeValuesSourceConfig(String name, @Nullable MappedFieldType fieldType, ValuesSource vs, DocValueFormat format, - SortOrder order, boolean missingBucket, boolean hasScript) { + CompositeValuesSourceConfig( + String name, + @Nullable MappedFieldType fieldType, + ValuesSource vs, + DocValueFormat format, + SortOrder order, + boolean missingBucket, + boolean hasScript, + SingleDimensionValuesSourceProvider singleDimensionValuesSourceProvider + ) { this.name = name; this.fieldType = fieldType; this.vs = vs; @@ -55,6 +80,7 @@ class CompositeValuesSourceConfig { this.reverseMul = order == SortOrder.ASC ? 
1 : -1;
         this.missingBucket = missingBucket;
         this.hasScript = hasScript;
+        this.singleDimensionValuesSourceProvider = singleDimensionValuesSourceProvider;
     }
 
     /**
@@ -107,4 +133,13 @@ int reverseMul() {
         assert reverseMul == -1 || reverseMul == 1;
         return reverseMul;
     }
+
+    SingleDimensionValuesSource<?> createValuesSource(
+        BigArrays bigArrays,
+        IndexReader reader,
+        int size,
+        LongConsumer addRequestCircuitBreakerBytes
+    ) {
+        return this.singleDimensionValuesSourceProvider.createValuesSource(bigArrays, reader, size, addRequestCircuitBreakerBytes, this);
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java
index 5ca9fcd2d3b80..f8f7042e1a936 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesSourceParserHelper.java
@@ -38,20 +38,13 @@ public class CompositeValuesSourceParserHelper {
-    static <VB extends CompositeValuesSourceBuilder<VB>, T> void declareValuesSourceFields(AbstractObjectParser<VB, T> objectParser,
-                                                                                           ValueType expectedValueType) {
+    static <VB extends CompositeValuesSourceBuilder<VB>, T> void declareValuesSourceFields(AbstractObjectParser<VB, T> objectParser) {
         objectParser.declareField(VB::field, XContentParser::text, new ParseField("field"), ObjectParser.ValueType.STRING);
         objectParser.declareBoolean(VB::missingBucket, new ParseField("missing_bucket"));
-        objectParser.declareField(VB::valueType, p -> {
+        objectParser.declareField(VB::userValuetypeHint, p -> {
             ValueType valueType = ValueType.lenientParse(p.text());
-            if (expectedValueType != null && valueType.isNotA(expectedValueType)) {
-                throw new ParsingException(p.getTokenLocation(),
-                    "Aggregation [" + objectParser.getName() + "] was configured with an incompatible value type ["
-                        + valueType + "]. 
It can only work on value of type [" - + expectedValueType + "]"); - } return valueType; }, new ParseField("value_type"), ObjectParser.ValueType.STRING); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index 5592fefb27a38..aae5d4b9669e9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -19,12 +19,14 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.apache.lucene.index.IndexReader; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -37,14 +39,19 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateIntervalConsumer; import org.elasticsearch.search.aggregations.bucket.histogram.DateIntervalWrapper; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.time.ZoneId; import java.time.ZoneOffset; +import java.util.List; import java.util.Objects; +import java.util.function.LongConsumer; /** * A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a {@link Script} or @@ -52,7 +59,24 @@ */ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuilder implements DateIntervalConsumer { + @FunctionalInterface + public interface DateHistogramCompositeSupplier extends ValuesSourceRegistry.CompositeSupplier { + CompositeValuesSourceConfig apply( + ValuesSourceConfig config, + Rounding rounding, + String name, + boolean hasScript, // probably redundant with the config, but currently we check this two different ways... 
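The supplier interface above is half of the new wiring; the registry key defined just below is the other half. End to end, the flow this patch adds is: SearchModule installs CompositeAggregationBuilder::registerAggregators as the aggregator registrar, that calls DateHistogramValuesSourceBuilder.register(builder) to bind REGISTRY_KEY to a supplier for the DATE and NUMERIC values source types, and innerBuild later resolves and applies it:

    // Resolution at build time (shape taken from the innerBuild bodies in this patch):
    CompositeValuesSourceConfig cfg = queryShardContext.getValuesSourceRegistry()
        .getComposite(REGISTRY_KEY, config)
        .apply(config, rounding, name, config.script() != null, format(), missingBucket(), order());
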
+        String format,
+        boolean missingBucket,
+        SortOrder order
+        );
+    }
+
     static final String TYPE = "date_histogram";
+    static final ValuesSourceRegistry.RegistryKey<DateHistogramCompositeSupplier> REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>(
+        TYPE,
+        DateHistogramCompositeSupplier.class
+    );
 
     static final ObjectParser<DateHistogramValuesSourceBuilder, Void> PARSER = ObjectParser.fromBuilder(TYPE, DateHistogramValuesSourceBuilder::new);
@@ -73,7 +97,7 @@ public class DateHistogramValuesSourceBuilder
                     return ZoneOffset.ofHours(p.intValue());
                 }
             }, new ParseField("time_zone"), ObjectParser.ValueType.LONG);
-        CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC);
+        CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER);
     }
 
     private ZoneId timeZone = null;
@@ -81,7 +105,7 @@ public class DateHistogramValuesSourceBuilder
     private long offset = 0;
 
     public DateHistogramValuesSourceBuilder(String name) {
-        super(name, ValueType.DATE);
+        super(name);
     }
 
     protected DateHistogramValuesSourceBuilder(StreamInput in) throws IOException {
@@ -246,25 +270,60 @@ public DateHistogramValuesSourceBuilder offset(long offset) {
         return this;
     }
 
+    public static void register(ValuesSourceRegistry.Builder builder) {
+        builder.registerComposite(
+            REGISTRY_KEY,
+            List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC),
+            (valuesSourceConfig, rounding, name, hasScript, format, missingBucket, order) -> {
+                ValuesSource.Numeric numeric = (ValuesSource.Numeric) valuesSourceConfig.getValuesSource();
+                // TODO once composite is plugged into the values source registry, or at least understands Date values source
+                // types, use it here
+                Rounding.Prepared preparedRounding = rounding.prepareForUnknown();
+                RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding);
+                // defaults to the raw format on date fields (preserve timestamp as longs) unless a format
+                // is specified in the builder.
+                final DocValueFormat docValueFormat = format == null ? DocValueFormat.RAW : valuesSourceConfig.format();
+                final MappedFieldType fieldType = valuesSourceConfig.fieldType();
+                return new CompositeValuesSourceConfig(
+                    name,
+                    fieldType,
+                    vs,
+                    docValueFormat,
+                    order,
+                    missingBucket,
+                    hasScript,
+                    (
+                        BigArrays bigArrays,
+                        IndexReader reader,
+                        int size,
+                        LongConsumer addRequestCircuitBreakerBytes,
+                        CompositeValuesSourceConfig compositeValuesSourceConfig) -> {
+                        final RoundingValuesSource roundingValuesSource = (RoundingValuesSource) compositeValuesSourceConfig.valuesSource();
+                        return new LongValuesSource(
+                            bigArrays,
+                            compositeValuesSourceConfig.fieldType(),
+                            roundingValuesSource::longValues,
+                            roundingValuesSource::round,
+                            compositeValuesSourceConfig.format(),
+                            compositeValuesSourceConfig.missingBucket(),
+                            size,
+                            compositeValuesSourceConfig.reverseMul()
+                        );
+                    }
+                );
+            }
+        );
+    }
+
+    @Override
+    protected ValuesSourceType getDefaultValuesSourceType() {
+        return CoreValuesSourceType.DATE;
+    }
+
     @Override
     protected CompositeValuesSourceConfig innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config) throws IOException {
         Rounding rounding = dateHistogramInterval.createRounding(timeZone(), offset);
-        ValuesSource orig = config.hasValues() ? 
config.getValuesSource() : null; - if (orig == null) { - orig = ValuesSource.Numeric.EMPTY; - } - if (orig instanceof ValuesSource.Numeric) { - ValuesSource.Numeric numeric = (ValuesSource.Numeric) orig; - // TODO once composite is plugged in to the values source registry or at least understands Date values source types use it here - Rounding.Prepared preparedRounding = rounding.prepareForUnknown(); - RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding); - // is specified in the builder. - final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format(); - final MappedFieldType fieldType = config.fieldType(); - return new CompositeValuesSourceConfig(name, fieldType, vs, docValueFormat, order(), - missingBucket(), config.script() != null); - } else { - throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName()); - } + return queryShardContext.getValuesSourceRegistry() + .getComposite(REGISTRY_KEY, config) + .apply(config, rounding, name, config.script() != null, format(), missingBucket(), order()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java index b1ea58a0d3d0d..aed87fa594529 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java @@ -19,12 +19,14 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.apache.lucene.index.IndexReader; import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.GeoBoundingBox; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -34,15 +36,38 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.CellIdSource; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; -import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; import java.util.Objects; +import java.util.function.LongConsumer; +import java.util.function.LongUnaryOperator; public class GeoTileGridValuesSourceBuilder extends CompositeValuesSourceBuilder { + @FunctionalInterface + public interface GeoTileCompositeSuppier extends ValuesSourceRegistry.CompositeSupplier { + CompositeValuesSourceConfig apply( + ValuesSourceConfig config, + int precision, + GeoBoundingBox boundingBox, + String name, + boolean hasScript, // probably redundant with the config, but currently we 
check this two different ways... + String format, + boolean missingBucket, + SortOrder order + ); + } + static final String TYPE = "geotile_grid"; + static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey( + TYPE, + GeoTileCompositeSuppier.class + ); private static final ObjectParser PARSER; static { @@ -50,13 +75,61 @@ public class GeoTileGridValuesSourceBuilder extends CompositeValuesSourceBuilder PARSER.declareInt(GeoTileGridValuesSourceBuilder::precision, new ParseField("precision")); PARSER.declareField(((p, builder, context) -> builder.geoBoundingBox(GeoBoundingBox.parseBoundingBox(p))), GeoBoundingBox.BOUNDS_FIELD, ObjectParser.ValueType.OBJECT); - CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC); + CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER); } static GeoTileGridValuesSourceBuilder parse(String name, XContentParser parser) throws IOException { return PARSER.parse(parser, new GeoTileGridValuesSourceBuilder(name), null); } + static void register(ValuesSourceRegistry.Builder builder) { + + builder.registerComposite( + REGISTRY_KEY, + CoreValuesSourceType.GEOPOINT, + (valuesSourceConfig, precision, boundingBox, name, hasScript, format, missingBucket, order) -> { + ValuesSource.GeoPoint geoPoint = (ValuesSource.GeoPoint) valuesSourceConfig.getValuesSource(); + // is specified in the builder. + final MappedFieldType fieldType = valuesSourceConfig.fieldType(); + CellIdSource cellIdSource = new CellIdSource( + geoPoint, + precision, + boundingBox, + GeoTileUtils::longEncode + ); + return new CompositeValuesSourceConfig( + name, + fieldType, + cellIdSource, + DocValueFormat.GEOTILE, + order, + missingBucket, + hasScript, + ( + BigArrays bigArrays, + IndexReader reader, + int size, + LongConsumer addRequestCircuitBreakerBytes, + CompositeValuesSourceConfig compositeValuesSourceConfig + + ) -> { + final CellIdSource cis = (CellIdSource) compositeValuesSourceConfig.valuesSource(); + return new GeoTileValuesSource( + bigArrays, + compositeValuesSourceConfig.fieldType(), + cis::longValues, + LongUnaryOperator.identity(), + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + } + ); + } + ); + } + private int precision = GeoTileGridAggregationBuilder.DEFAULT_PRECISION; private GeoBoundingBox geoBoundingBox = new GeoBoundingBox(new GeoPoint(Double.NaN, Double.NaN), new GeoPoint(Double.NaN, Double.NaN)); @@ -127,22 +200,16 @@ public boolean equals(Object obj) { && Objects.equals(geoBoundingBox, other.geoBoundingBox); } + @Override + protected ValuesSourceType getDefaultValuesSourceType() { + return CoreValuesSourceType.GEOPOINT; + } + @Override protected CompositeValuesSourceConfig innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config) throws IOException { - ValuesSource orig = config.hasValues() ? config.getValuesSource() : null; - if (orig == null) { - orig = ValuesSource.GeoPoint.EMPTY; - } - if (orig instanceof ValuesSource.GeoPoint) { - ValuesSource.GeoPoint geoPoint = (ValuesSource.GeoPoint) orig; - // is specified in the builder. 
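The geotile source reuses CellIdSource so that every point is already a sortable, long-encoded tile before the composite queue sees it. A sketch of the encoding step the GeoTileUtils::longEncode reference performs (coordinates and precision are illustrative):

    // Packs zoom plus x/y tile coordinates into a single comparable long.
    long cell = GeoTileUtils.longEncode(-95.1, 32.5, 7);   // lon, lat, precision
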
- final MappedFieldType fieldType = config.fieldType(); - CellIdSource cellIdSource = new CellIdSource(geoPoint, precision, geoBoundingBox, GeoTileUtils::longEncode); - return new CompositeValuesSourceConfig(name, fieldType, cellIdSource, DocValueFormat.GEOTILE, order(), - missingBucket(), script() != null); - } else { - throw new IllegalArgumentException("invalid source, expected geo_point, got " + orig.getClass().getSimpleName()); - } + return queryShardContext.getValuesSourceRegistry() + .getComposite(REGISTRY_KEY, config) + .apply(config, precision, geoBoundingBox(), name, script() != null, format(), missingBucket(), order()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java index ce66c763fcc0a..3984ed1d02d10 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/HistogramValuesSourceBuilder.java @@ -19,42 +19,103 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.apache.lucene.index.IndexReader; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; -import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; +import java.util.List; import java.util.Objects; +import java.util.function.LongConsumer; /** * A {@link CompositeValuesSourceBuilder} that builds a {@link HistogramValuesSource} from another numeric values source * using the provided interval. */ public class HistogramValuesSourceBuilder extends CompositeValuesSourceBuilder { + @FunctionalInterface + public interface HistogramCompositeSupplier extends ValuesSourceRegistry.CompositeSupplier { + CompositeValuesSourceConfig apply( + ValuesSourceConfig config, + double interval, + String name, + boolean hasScript, // probably redundant with the config, but currently we check this two different ways... 
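For the plain histogram source, values are bucketed by rounding down to a multiple of the interval before they reach the DoubleValuesSource. A worked example of that arithmetic, assuming HistogramValuesSource keeps its usual floor semantics:

    double interval = 5.0;
    double value = 12.3;
    double bucketKey = Math.floor(value / interval) * interval;   // 10.0
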
+ String format, + boolean missingBucket, + SortOrder order + ); + } + static final String TYPE = "histogram"; + static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + TYPE, + HistogramCompositeSupplier.class + ); private static final ObjectParser PARSER; static { PARSER = new ObjectParser<>(HistogramValuesSourceBuilder.TYPE); PARSER.declareDouble(HistogramValuesSourceBuilder::interval, Histogram.INTERVAL_FIELD); - CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, ValueType.NUMERIC); + CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER); } static HistogramValuesSourceBuilder parse(String name, XContentParser parser) throws IOException { return PARSER.parse(parser, new HistogramValuesSourceBuilder(name), null); } + public static void register(ValuesSourceRegistry.Builder builder) { + builder.registerComposite( + REGISTRY_KEY, + List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC), + (valuesSourceConfig, interval, name, hasScript, format, missingBucket, order) -> { + ValuesSource.Numeric numeric = (ValuesSource.Numeric) valuesSourceConfig.getValuesSource(); + final HistogramValuesSource vs = new HistogramValuesSource(numeric, interval); + final MappedFieldType fieldType = valuesSourceConfig.fieldType(); + return new CompositeValuesSourceConfig( + name, + fieldType, + vs, + valuesSourceConfig.format(), + order, + missingBucket, + hasScript, + ( + BigArrays bigArrays, + IndexReader reader, + int size, + LongConsumer addRequestCircuitBreakerBytes, + CompositeValuesSourceConfig compositeValuesSourceConfig) -> { + final ValuesSource.Numeric numericValuesSource = (ValuesSource.Numeric) compositeValuesSourceConfig.valuesSource(); + return new DoubleValuesSource( + bigArrays, + compositeValuesSourceConfig.fieldType(), + numericValuesSource::doubleValues, + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + } + ); + }); + } + private double interval = 0; public HistogramValuesSourceBuilder(String name) { - super(name, ValueType.DOUBLE); + super(name); } protected HistogramValuesSourceBuilder(StreamInput in) throws IOException { @@ -109,20 +170,15 @@ public HistogramValuesSourceBuilder interval(double interval) { return this; } + @Override + protected ValuesSourceType getDefaultValuesSourceType() { + return CoreValuesSourceType.NUMERIC; + } + @Override protected CompositeValuesSourceConfig innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config) throws IOException { - ValuesSource orig = config.hasValues() ? 
config.getValuesSource() : null; - if (orig == null) { - orig = ValuesSource.Numeric.EMPTY; - } - if (orig instanceof ValuesSource.Numeric) { - ValuesSource.Numeric numeric = (ValuesSource.Numeric) orig; - final HistogramValuesSource vs = new HistogramValuesSource(numeric, interval); - final MappedFieldType fieldType = config.fieldType(); - return new CompositeValuesSourceConfig(name, fieldType, vs, config.format(), order(), - missingBucket(), script() != null); - } else { - throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName()); - } + return queryShardContext.getValuesSourceRegistry() + .getComposite(REGISTRY_KEY, config) + .apply(config, interval, name, script() != null, format(), missingBucket(), order()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java index 74025131dadf3..1c8fc843c40c0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/TermsValuesSourceBuilder.java @@ -19,33 +19,58 @@ package org.elasticsearch.search.aggregations.bucket.composite; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.script.Script; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; -import org.elasticsearch.script.Script; +import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; +import java.util.List; +import java.util.function.LongConsumer; +import java.util.function.LongUnaryOperator; /** * A {@link CompositeValuesSourceBuilder} that builds a {@link ValuesSource} from a {@link Script} or * a field name. */ public class TermsValuesSourceBuilder extends CompositeValuesSourceBuilder { + + @FunctionalInterface + public interface TermsCompositeSupplier extends ValuesSourceRegistry.CompositeSupplier { + CompositeValuesSourceConfig apply( + ValuesSourceConfig config, + String name, + boolean hasScript, // probably redundant with the config, but currently we check this two different ways... 
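Terms is the one source that still needs two code paths, registered separately below: numeric-backed types produce long/double values sources, while bytes-backed types choose between global ordinals and raw binary terms at runtime. In outline, the second registration encodes this decision:

    // Sketch of the branch inside the bytes/IP supplier that follows:
    if (config.hasGlobalOrdinals() && reader instanceof DirectoryReader) {
        // GlobalOrdinalValuesSource: compare ordinals, cheap per document
    } else {
        // BinaryValuesSource: compare BytesRefs, accounted against the circuit breaker
    }
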
+ String format, + boolean missingBucket, + SortOrder order + ); + } static final String TYPE = "terms"; + static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = new ValuesSourceRegistry.RegistryKey<>( + TYPE, + TermsCompositeSupplier.class + ); private static final ObjectParser PARSER; static { PARSER = new ObjectParser<>(TermsValuesSourceBuilder.TYPE); - CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER, null); + CompositeValuesSourceParserHelper.declareValuesSourceFields(PARSER); } + static TermsValuesSourceBuilder parse(String name, XContentParser parser) throws IOException { return PARSER.parse(parser, new TermsValuesSourceBuilder(name), null); } @@ -69,22 +94,122 @@ public String type() { return TYPE; } + static void register(ValuesSourceRegistry.Builder builder) { + builder.registerComposite( + REGISTRY_KEY, + List.of(CoreValuesSourceType.DATE, CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN), + (valuesSourceConfig, name, hasScript, format, missingBucket, order) -> { + final DocValueFormat docValueFormat; + if (format == null && valuesSourceConfig.valueSourceType() == CoreValuesSourceType.DATE) { + // defaults to the raw format on date fields (preserve timestamp as longs). + docValueFormat = DocValueFormat.RAW; + } else { + docValueFormat = valuesSourceConfig.format(); + } + return new CompositeValuesSourceConfig( + name, + valuesSourceConfig.fieldType(), + valuesSourceConfig.getValuesSource(), + docValueFormat, + order, + missingBucket, + hasScript, + ( + BigArrays bigArrays, + IndexReader reader, + int size, + LongConsumer addRequestCircuitBreakerBytes, + CompositeValuesSourceConfig compositeValuesSourceConfig) -> { + + final ValuesSource.Numeric vs = (ValuesSource.Numeric) compositeValuesSourceConfig.valuesSource(); + if (vs.isFloatingPoint()) { + return new DoubleValuesSource( + bigArrays, + compositeValuesSourceConfig.fieldType(), + vs::doubleValues, + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + + } else { + final LongUnaryOperator rounding; + rounding = LongUnaryOperator.identity(); + return new LongValuesSource( + bigArrays, + compositeValuesSourceConfig.fieldType(), + vs::longValues, + rounding, + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + } + + } + ); + } + ); + + builder.registerComposite( + REGISTRY_KEY, + List.of(CoreValuesSourceType.BYTES, CoreValuesSourceType.IP), + (valuesSourceConfig, name, hasScript, format, missingBucket, order) -> new CompositeValuesSourceConfig( + name, + valuesSourceConfig.fieldType(), + valuesSourceConfig.getValuesSource(), + valuesSourceConfig.format(), + order, + missingBucket, + hasScript, + ( + BigArrays bigArrays, + IndexReader reader, + int size, + LongConsumer addRequestCircuitBreakerBytes, + CompositeValuesSourceConfig compositeValuesSourceConfig) -> { + + if (valuesSourceConfig.hasGlobalOrdinals() && reader instanceof DirectoryReader) { + ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) compositeValuesSourceConfig + .valuesSource(); + return new GlobalOrdinalValuesSource( + bigArrays, + compositeValuesSourceConfig.fieldType(), + vs::globalOrdinalsValues, + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + } else { + ValuesSource.Bytes vs = (ValuesSource.Bytes) 
compositeValuesSourceConfig.valuesSource(); + return new BinaryValuesSource( + bigArrays, + addRequestCircuitBreakerBytes, + compositeValuesSourceConfig.fieldType(), + vs::bytesValues, + compositeValuesSourceConfig.format(), + compositeValuesSourceConfig.missingBucket(), + size, + compositeValuesSourceConfig.reverseMul() + ); + } + } + ) + ); + } + + @Override + protected ValuesSourceType getDefaultValuesSourceType() { + return CoreValuesSourceType.BYTES; + } + @Override protected CompositeValuesSourceConfig innerBuild(QueryShardContext queryShardContext, ValuesSourceConfig config) throws IOException { - ValuesSource vs = config.hasValues() ? config.getValuesSource() : null; - if (vs == null) { - // The field is unmapped so we use a value source that can parse any type of values. - // This is needed because the after values are parsed even when there are no values to process. - vs = ValuesSource.Bytes.WithOrdinals.EMPTY; - } - final MappedFieldType fieldType = config.fieldType(); - final DocValueFormat format; - if (format() == null && fieldType instanceof DateFieldMapper.DateFieldType) { - // defaults to the raw format on date fields (preserve timestamp as longs). - format = DocValueFormat.RAW; - } else { - format = config.format(); - } - return new CompositeValuesSourceConfig(name, fieldType, vs, format, order(), missingBucket(), script() != null); + return queryShardContext.getValuesSourceRegistry() + .getComposite(REGISTRY_KEY, config) + .apply(config, name, script() != null, format(), missingBucket(), order()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java index 668cf9b5990cc..824d82d94b3e3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalVariableWidthHistogram.java @@ -400,7 +400,6 @@ protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { } mergeBucketsIfNeeded(reducedBuckets, targetNumBuckets, reduceContext); - return reducedBuckets; } @@ -451,7 +450,8 @@ private void mergeBucketsWithPlan(List buckets, List plan, } toMerge.add(buckets.get(startIdx)); // Don't remove the startIdx bucket because it will be replaced by the merged bucket - reduceContext.consumeBucketsAndMaybeBreak(- (toMerge.size() - 1)); + int toRemove = toMerge.stream().mapToInt(b -> countInnerBucket(b)+1).sum(); + reduceContext.consumeBucketsAndMaybeBreak(-toRemove + 1); Bucket merged_bucket = reduceBucket(toMerge, reduceContext); buckets.set(startIdx, merged_bucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index f5943d7902e63..cd2db4608274f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -561,7 +561,7 @@ private InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws BucketUpdater updater = bucketUpdater(owningBucketOrds[ordIdx]); collectionStrategy.forEach(owningBucketOrds[ordIdx], new BucketInfoConsumer() { TB spare = null; - + 
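// spare is a scratch bucket for the accept(...) callback below: it is created lazily, filled for each
// candidate ordinal, and offered to the top-buckets priority queue, which hands a bucket back for reuse
// when a candidate does not make the cut — so the walk over collected ordinals allocates at most one
// temporary bucket per retained candidate before the "Get the top buckets" read-out further down.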
@Override public void accept(long globalOrd, long bucketOrd, long docCount) throws IOException { otherDocCount[finalOrdIdx] += docCount; @@ -574,7 +574,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep } } }); - + // Get the top buckets topBucketsPreOrd[ordIdx] = buildBuckets(ordered.size()); for (int i = ordered.size() - 1; i >= 0; --i) { @@ -797,9 +797,14 @@ SignificantStringTerms.Bucket buildEmptyTemporaryBucket() { return new SignificantStringTerms.Bucket(new BytesRef(), 0, 0, 0, 0, null, format, 0); } + private long subsetSize(long owningBucketOrd) { + // if the owningBucketOrd is not in the array that means the bucket is empty so the size has to be 0 + return owningBucketOrd < subsetSizes.size() ? subsetSizes.get(owningBucketOrd) : 0; + } + @Override BucketUpdater bucketUpdater(long owningBucketOrd) throws IOException { - long subsetSize = subsetSizes.get(owningBucketOrd); + long subsetSize = subsetSize(owningBucketOrd); return (spare, globalOrd, bucketOrd, docCount) -> { spare.bucketOrd = bucketOrd; oversizedCopy(lookupGlobalOrd.apply(globalOrd), spare.termBytes); @@ -839,7 +844,7 @@ SignificantStringTerms buildResult(long owningBucketOrd, long otherDocCount, Sig bucketCountThresholds.getMinDocCount(), metadata(), format, - subsetSizes.get(owningBucketOrd), + subsetSize(owningBucketOrd), supersetSize, significanceHeuristic, Arrays.asList(topBuckets) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java index b28d6e4f6147d..c2fe05a0ce176 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistry.java @@ -21,12 +21,14 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.AggregationExecutionException; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import java.util.AbstractMap; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; /** * {@link ValuesSourceRegistry} holds the mapping from {@link ValuesSourceType}s to {@link AggregatorSupplier}s. 
DO NOT directly @@ -36,14 +38,47 @@ */ public class ValuesSourceRegistry { + public interface CompositeSupplier { + // this interface intentionally left blank + } + + public static final class RegistryKey<T> { + private final String name; + private final Class<T> supplierType; + + public RegistryKey(String name, Class<T> supplierType) { + this.name = Objects.requireNonNull(name); + this.supplierType = Objects.requireNonNull(supplierType); + } + + public String getName() { + return name; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + RegistryKey<?> that = (RegistryKey<?>) o; + return name.equals(that.name) && supplierType.equals(that.supplierType); + } + + @Override + public int hashCode() { + return Objects.hash(name, supplierType); + } + } + public static class Builder { private final AggregationUsageService.Builder usageServiceBuilder; + private Map<String, List<Map.Entry<ValuesSourceType, AggregatorSupplier>>> aggregatorRegistry = new HashMap<>(); + private Map<RegistryKey<? extends CompositeSupplier>, List<Map.Entry<ValuesSourceType, CompositeSupplier>>> compositeRegistry = + new HashMap<>(); public Builder() { this.usageServiceBuilder = new AggregationUsageService.Builder(); } - private Map<String, List<Map.Entry<ValuesSourceType, AggregatorSupplier>>> aggregatorRegistry = new HashMap<>(); /** * Register a ValuesSource to Aggregator mapping. This method registers mappings that only apply to a @@ -78,6 +113,46 @@ public void register(String aggregationName, List valuesSource } } + /** + * Register a new key generation function for the + * {@link org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation}. + * @param registryKey the subclass of {@link CompositeSupplier} associated with the {@link CompositeValuesSourceBuilder} type this + * mapping is being registered for, paired with the name of the key type. + * @param valuesSourceType the {@link ValuesSourceType} this mapping applies to + * @param compositeSupplier A function returning an appropriate + * {@link org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig} + */ + public <T extends CompositeSupplier> void registerComposite( + RegistryKey<T> registryKey, + ValuesSourceType valuesSourceType, + T compositeSupplier + ) { + if (compositeRegistry.containsKey(registryKey) == false) { + compositeRegistry.put(registryKey, new ArrayList<>()); + } + compositeRegistry.get(registryKey).add(new AbstractMap.SimpleEntry<>(valuesSourceType, compositeSupplier)); + } + + /** + * Register a new key generation function for the + * {@link org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation}. This is a convenience version to map + * multiple types to the same supplier. + * @param registryKey the subclass of {@link CompositeSupplier} associated with the {@link CompositeValuesSourceBuilder} type this + * mapping is being registered for, paired with the name of the key type. 
+ * @param valuesSourceTypes the {@link ValuesSourceType}s this mapping applies to + * @param compositeSupplier A function returning an appropriate + * {@link org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig} + */ + public <T extends CompositeSupplier> void registerComposite( + RegistryKey<T> registryKey, + List<ValuesSourceType> valuesSourceTypes, + T compositeSupplier + ) { + for (ValuesSourceType valuesSourceType : valuesSourceTypes) { + registerComposite(registryKey, valuesSourceType, compositeSupplier); + } + } + public void registerUsage(String aggregationName, ValuesSourceType valuesSourceType) { usageServiceBuilder.registerAggregationUsage(aggregationName, valuesSourceType.typeName()); } @@ -87,30 +162,38 @@ public void registerUsage(String aggregationName) { } public ValuesSourceRegistry build() { - return new ValuesSourceRegistry(aggregatorRegistry, usageServiceBuilder.build()); + return new ValuesSourceRegistry(aggregatorRegistry, compositeRegistry, usageServiceBuilder.build()); } } - /** Maps Aggregation names to (ValuesSourceType, Supplier) pairs, keyed by ValuesSourceType */ - private final AggregationUsageService usageService; - private Map<String, Map<ValuesSourceType, AggregatorSupplier>> aggregatorRegistry; - public ValuesSourceRegistry(Map<String, List<Map.Entry<ValuesSourceType, AggregatorSupplier>>> aggregatorRegistry, - AggregationUsageService usageService) { + private static <K, T> Map<K, Map<ValuesSourceType, T>> copyMap(Map<K, List<Map.Entry<ValuesSourceType, T>>> mutableMap) { /* Make an immutable copy of our input map. Since this is write once, read many, we'll spend a bit of extra time to shape this into a Map.of(), which is more read optimized than just using a hash map. */ @SuppressWarnings("unchecked") - Map.Entry<String, Map<ValuesSourceType, AggregatorSupplier>>[] copiedEntries = new Map.Entry[aggregatorRegistry.size()]; + Map.Entry<K, Map<ValuesSourceType, T>>[] copiedEntries = new Map.Entry[mutableMap.size()]; int i = 0; - for (Map.Entry<String, List<Map.Entry<ValuesSourceType, AggregatorSupplier>>> entry : aggregatorRegistry.entrySet()) { - String aggName = entry.getKey(); - List<Map.Entry<ValuesSourceType, AggregatorSupplier>> values = entry.getValue(); - @SuppressWarnings("unchecked") Map.Entry<String, Map<ValuesSourceType, AggregatorSupplier>> newEntry = - Map.entry(aggName, Map.ofEntries(values.toArray(new Map.Entry[0]))); + for (Map.Entry<K, List<Map.Entry<ValuesSourceType, T>>> entry : mutableMap.entrySet()) { + K topKey = entry.getKey(); + List<Map.Entry<ValuesSourceType, T>> values = entry.getValue(); + @SuppressWarnings("unchecked") + Map.Entry<K, Map<ValuesSourceType, T>> newEntry = Map.entry(topKey, Map.ofEntries(values.toArray(new Map.Entry[0]))); copiedEntries[i++] = newEntry; } - this.aggregatorRegistry = Map.ofEntries(copiedEntries); + return Map.ofEntries(copiedEntries); + } + + /** Maps Aggregation names to (ValuesSourceType, Supplier) pairs, keyed by ValuesSourceType */ + private final AggregationUsageService usageService; + private Map<String, Map<ValuesSourceType, AggregatorSupplier>> aggregatorRegistry; + private Map<RegistryKey<? extends CompositeSupplier>, Map<ValuesSourceType, CompositeSupplier>> compositeRegistry; + + public ValuesSourceRegistry(Map<String, List<Map.Entry<ValuesSourceType, AggregatorSupplier>>> aggregatorRegistry, + Map<RegistryKey<? extends CompositeSupplier>, List<Map.Entry<ValuesSourceType, CompositeSupplier>>> compositeRegistry, + AggregationUsageService usageService) { + this.aggregatorRegistry = copyMap(aggregatorRegistry); + this.compositeRegistry = copyMap(compositeRegistry); this.usageService = usageService; } @@ -139,6 +222,18 @@ public AggregatorSupplier getAggregator(ValuesSourceConfig valuesSourceConfig, S throw new AggregationExecutionException("Unregistered Aggregation [" + aggregationName + "]"); } + public <T extends CompositeSupplier> T getComposite(RegistryKey<T> registryKey, ValuesSourceConfig config) { + if (registryKey != null && compositeRegistry.containsKey(registryKey)) { + CompositeSupplier supplier = compositeRegistry.get(registryKey).get(config.valueSourceType()); + if (supplier == null) { + throw new IllegalArgumentException(config.getDescription() + " is not supported for composite source [" + + registryKey.getName() + "]"); + } + return (T) supplier; // Safe because we checked the type matched the key at load 
time + } + throw new AggregationExecutionException("Unregistered composite source [" + registryKey.getName() + "]"); + } + public AggregationUsageService getUsageService() { return usageService; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index a2e6a16dbb7f8..e0a8e632d05e7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -126,7 +126,8 @@ public void tearDown() throws Exception { FIELD_TYPES = null; } - public void testUnmappedField() throws Exception { + public void testUnmappedFieldWithTerms() throws Exception { + final List>> dataset = new ArrayList<>(); dataset.addAll( Arrays.asList( @@ -204,6 +205,245 @@ public void testUnmappedField() throws Exception { ); } + public void testUnmappedFieldWithGeopoint() throws Exception { + final List>> dataset = new ArrayList<>(); + final String mappedFieldName = "geo_point"; + dataset.addAll( + Arrays.asList( + createDocument(mappedFieldName, new GeoPoint(48.934059, 41.610741)), + createDocument(mappedFieldName, new GeoPoint(-23.065941, 113.610741)), + createDocument(mappedFieldName, new GeoPoint(90.0, 0.0)), + createDocument(mappedFieldName, new GeoPoint(37.2343, -115.8067)), + createDocument(mappedFieldName, new GeoPoint(90.0, 0.0)) + ) + ); + + // just unmapped = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder("name", + Arrays.asList( + new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped") + ) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + + // unmapped missing bucket = one result + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder("name", + Arrays.asList( + new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped").missingBucket(true) + ) + ), + (result) -> { + assertEquals(1, result.getBuckets().size()); + assertEquals("{unmapped=null}", result.afterKey().toString()); + assertEquals("{unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(5L, result.getBuckets().get(0).getDocCount()); + } + ); + + // field + unmapped, no missing bucket = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder("name", + Arrays.asList( + new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName), + new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped") + ) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + + // field + unmapped with missing bucket = multiple results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder("name", + Arrays.asList( + new GeoTileGridValuesSourceBuilder(mappedFieldName).field(mappedFieldName), + new GeoTileGridValuesSourceBuilder("unmapped") .field("unmapped").missingBucket(true) + ) + ), + (result) -> { + assertEquals(2, result.getBuckets().size()); + assertEquals("{geo_point=7/64/56, unmapped=null}", 
result.afterKey().toString()); + assertEquals("{geo_point=7/32/56, unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{geo_point=7/64/56, unmapped=null}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(3L, result.getBuckets().get(1).getDocCount()); + } + ); + + } + + public void testUnmappedFieldWithHistogram() throws Exception { + final List<Map<String, List<Object>>> dataset = new ArrayList<>(); + final String mappedFieldName = "price"; + dataset.addAll( + Arrays.asList( + createDocument(mappedFieldName, 103L), + createDocument(mappedFieldName, 51L), + createDocument(mappedFieldName, 56L), + createDocument(mappedFieldName, 105L), + createDocument(mappedFieldName, 25L) + ) + ); + + // just unmapped = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new HistogramValuesSourceBuilder("unmapped").field("unmapped").interval(10)) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + // unmapped missing bucket = one result + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList(new HistogramValuesSourceBuilder("unmapped").field("unmapped").interval(10).missingBucket(true)) + ), + (result) -> { + assertEquals(1, result.getBuckets().size()); + assertEquals("{unmapped=null}", result.afterKey().toString()); + assertEquals("{unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(5L, result.getBuckets().get(0).getDocCount()); + } + ); + + // field + unmapped, no missing bucket = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new HistogramValuesSourceBuilder(mappedFieldName).field(mappedFieldName).interval(10), + new HistogramValuesSourceBuilder("unmapped").field("unmapped").interval(10) + ) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + + // field + unmapped with missing bucket = multiple results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new HistogramValuesSourceBuilder(mappedFieldName).field(mappedFieldName).interval(10), + new HistogramValuesSourceBuilder("unmapped").field("unmapped").interval(10).missingBucket(true) + ) + ), + (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{price=100.0, unmapped=null}", result.afterKey().toString()); + assertEquals("{price=20.0, unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(0).getDocCount()); + assertEquals("{price=50.0, unmapped=null}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("{price=100.0, unmapped=null}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + } + ); + } + + public void testUnmappedFieldWithDateHistogram() throws Exception { + String mappedFieldName = "date"; + final List<Map<String, List<Object>>> dataset = new ArrayList<>(); + dataset.addAll( + Arrays.asList( + createDocument(mappedFieldName, asLong("2017-10-20T03:08:45")), + 
createDocument(mappedFieldName, asLong("2016-09-20T09:00:34")), + createDocument(mappedFieldName, asLong("2016-09-20T11:34:00")), + createDocument(mappedFieldName, asLong("2017-10-20T06:09:24")), + createDocument(mappedFieldName, asLong("2017-10-19T06:09:24")) + ) + ); + // just unmapped = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("unmapped").field("unmapped").calendarInterval(DateHistogramInterval.days(1)) + ) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + // unmapped missing bucket = one result + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder("unmapped").field("unmapped") + .calendarInterval(DateHistogramInterval.days(1)) + .missingBucket(true) + ) + ), + (result) -> { + assertEquals(1, result.getBuckets().size()); + assertEquals("{unmapped=null}", result.afterKey().toString()); + assertEquals("{unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(5L, result.getBuckets().get(0).getDocCount()); + } + ); + + // field + unmapped, no missing bucket = no results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new HistogramValuesSourceBuilder(mappedFieldName).field(mappedFieldName).interval(10), + new DateHistogramValuesSourceBuilder("unmapped").field("unmapped").calendarInterval(DateHistogramInterval.days(1)) + ) + ), + (result) -> assertEquals(0, result.getBuckets().size()) + ); + + // field + unmapped with missing bucket = multiple results + testSearchCase( + Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(mappedFieldName)), + dataset, + () -> new CompositeAggregationBuilder( + "name", + Arrays.asList( + new DateHistogramValuesSourceBuilder(mappedFieldName).field(mappedFieldName) + .calendarInterval(DateHistogramInterval.days(1)), + new DateHistogramValuesSourceBuilder("unmapped").field("unmapped") + .calendarInterval(DateHistogramInterval.days(1)) + .missingBucket(true) + ) + ), + (result) -> { + assertEquals(3, result.getBuckets().size()); + assertEquals("{date=1508457600000, unmapped=null}", result.afterKey().toString()); + assertEquals("{date=1474329600000, unmapped=null}", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("{date=1508371200000, unmapped=null}", result.getBuckets().get(1).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(1).getDocCount()); + assertEquals("{date=1508457600000, unmapped=null}", result.getBuckets().get(2).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(2).getDocCount()); + } + ); + } + public void testWithKeyword() throws Exception { final List>> dataset = new ArrayList<>(); dataset.addAll( @@ -1879,7 +2119,7 @@ public void testEarlyTermination() throws Exception { ) ); - executeTestCase(true, false, new TermQuery(new Term("foo", "bar")), + executeTestCase(true, new TermQuery(new Term("foo", "bar")), dataset, () -> new CompositeAggregationBuilder("name", @@ -1899,7 +2139,7 @@ public void testEarlyTermination() throws Exception { ); // source field and index sorting config have different 
order - executeTestCase(true, false, new TermQuery(new Term("foo", "bar")), + executeTestCase(true, new TermQuery(new Term("foo", "bar")), dataset, () -> new CompositeAggregationBuilder("name", @@ -1936,7 +2176,7 @@ public void testIndexSortWithDuplicate() throws Exception { ); for (SortOrder order : SortOrder.values()) { - executeTestCase(true, false, new MatchAllDocsQuery(), + executeTestCase(true, new MatchAllDocsQuery(), dataset, () -> new CompositeAggregationBuilder("name", @@ -1959,7 +2199,7 @@ public void testIndexSortWithDuplicate() throws Exception { } ); - executeTestCase(true, false, new MatchAllDocsQuery(), + executeTestCase(true, new MatchAllDocsQuery(), dataset, () -> new CompositeAggregationBuilder("name", @@ -1989,14 +2229,12 @@ private void testSearchCase(List queries, Supplier create, Consumer verify) throws IOException { for (Query query : queries) { - executeTestCase(false, false, query, dataset, create, verify); - executeTestCase(false, true, query, dataset, create, verify); - executeTestCase(true, true, query, dataset, create, verify); + executeTestCase(false, query, dataset, create, verify); + executeTestCase(true, query, dataset, create, verify); } } private void executeTestCase(boolean useIndexSort, - boolean reduced, Query query, List>> dataset, Supplier create, @@ -2019,18 +2257,13 @@ private void executeTestCase(boolean useIndexSort, indexWriter.addDocument(document); document.clear(); } - if (reduced == false && randomBoolean()) { + if (rarely()) { indexWriter.forceMerge(1); } } try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = new IndexSearcher(indexReader); - final InternalComposite composite; - if (reduced) { - composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES); - } else { - composite = search(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES); - } + InternalComposite composite = searchAndReduce(indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES); verify.accept(composite); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java index 5afad0400149a..e1b1e951df470 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FilterAggregatorTests.java @@ -62,7 +62,7 @@ public void testEmpty() throws Exception { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); QueryBuilder filter = QueryBuilders.termQuery("field", randomAlphaOfLength(5)); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter); - InternalFilter response = search(indexSearcher, new MatchAllDocsQuery(), builder, + InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); assertEquals(response.getDocCount(), 0); assertFalse(AggregationInspectionHelper.hasValue(response)); @@ -80,7 +80,7 @@ public void testRandom() throws Exception { for (int i = 0; i < numDocs; i++) { if (frequently()) { // make sure we have more than one segment to test the merge - indexWriter.getReader().close(); + indexWriter.commit(); } int value = randomInt(maxTerm-1); expectedBucketCount[value] += 1; @@ -98,20 +98,12 @@ public void testRandom() throws Exception { QueryBuilder filter = QueryBuilders.termQuery("field", 
Integer.toString(value)); FilterAggregationBuilder builder = new FilterAggregationBuilder("test", filter); - for (boolean doReduce : new boolean[]{true, false}) { - final InternalFilter response; - if (doReduce) { - response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, - fieldType); - } else { - response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); - } - assertEquals(response.getDocCount(), (long) expectedBucketCount[value]); - if (expectedBucketCount[value] > 0) { - assertTrue(AggregationInspectionHelper.hasValue(response)); - } else { - assertFalse(AggregationInspectionHelper.hasValue(response)); - } + final InternalFilter response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + assertEquals(response.getDocCount(), (long) expectedBucketCount[value]); + if (expectedBucketCount[value] > 0) { + assertTrue(AggregationInspectionHelper.hasValue(response)); + } else { + assertFalse(AggregationInspectionHelper.hasValue(response)); } } finally { indexReader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java index 265115e720600..617eec9799a4d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregatorTests.java @@ -60,7 +60,7 @@ public void testEmpty() throws Exception { } FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", filters); builder.otherBucketKey("other"); - InternalFilters response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); assertEquals(response.getBuckets().size(), numFilters); for (InternalFilters.InternalBucket filter : response.getBuckets()) { assertEquals(filter.getDocCount(), 0); @@ -113,22 +113,15 @@ public void testKeyedFilter() throws Exception { FiltersAggregationBuilder builder = new FiltersAggregationBuilder("test", keys); builder.otherBucket(true); builder.otherBucketKey("other"); - for (boolean doReduce : new boolean[] {true, false}) { - final InternalFilters filters; - if (doReduce) { - filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); - } else { - filters = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); - } - assertEquals(filters.getBuckets().size(), 7); - assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2); - assertEquals(filters.getBucketByKey("foo").getDocCount(), 2); - assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2); - assertEquals(filters.getBucketByKey("bar").getDocCount(), 1); - assertEquals(filters.getBucketByKey("same").getDocCount(), 1); - assertEquals(filters.getBucketByKey("other").getDocCount(), 2); - assertTrue(AggregationInspectionHelper.hasValue(filters)); - } + final InternalFilters filters = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + assertEquals(filters.getBuckets().size(), 7); + assertEquals(filters.getBucketByKey("foobar").getDocCount(), 2); + assertEquals(filters.getBucketByKey("foo").getDocCount(), 2); + assertEquals(filters.getBucketByKey("foo2").getDocCount(), 2); + assertEquals(filters.getBucketByKey("bar").getDocCount(), 1); + 
assertEquals(filters.getBucketByKey("same").getDocCount(), 1); + assertEquals(filters.getBucketByKey("other").getDocCount(), 2); + assertTrue(AggregationInspectionHelper.hasValue(filters)); indexReader.close(); directory.close(); @@ -175,28 +168,21 @@ public void testRandom() throws Exception { builder.otherBucket(true); builder.otherBucketKey("other"); - for (boolean doReduce : new boolean[]{true, false}) { - final InternalFilters response; - if (doReduce) { - response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); - } else { - response = search(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); - } - List buckets = response.getBuckets(); - assertEquals(buckets.size(), filters.length + 1); + final InternalFilters response = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, fieldType); + List buckets = response.getBuckets(); + assertEquals(buckets.size(), filters.length + 1); - for (InternalFilters.InternalBucket bucket : buckets) { - if ("other".equals(bucket.getKey())) { - assertEquals(bucket.getDocCount(), expectedOtherCount); - } else { - int index = Integer.parseInt(bucket.getKey()); - assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]); - } + for (InternalFilters.InternalBucket bucket : buckets) { + if ("other".equals(bucket.getKey())) { + assertEquals(bucket.getDocCount(), expectedOtherCount); + } else { + int index = Integer.parseInt(bucket.getKey()); + assertEquals(bucket.getDocCount(), (long) expectedBucketCount[filterTerms[index]]); } - - // Always true because we include 'other' in the agg - assertTrue(AggregationInspectionHelper.hasValue(response)); } + + // Always true because we include 'other' in the agg + assertTrue(AggregationInspectionHelper.hasValue(response)); } finally { indexReader.close(); directory.close(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java index 477df9e59163c..4cb34acab2449 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -98,7 +98,7 @@ public class AutoDateHistogramAggregatorTests extends DateHistogramAggregatorTes private static final Query DEFAULT_QUERY = new MatchAllDocsQuery(); public void testMatchNoDocs() throws IOException { - testBothCases(new MatchNoDocsQuery(), DATES_WITH_TIME, + testSearchCase(new MatchNoDocsQuery(), DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), histogram -> { assertEquals(0, histogram.getBuckets().size()); @@ -115,20 +115,16 @@ public void testMatchAllDocs() throws IOException { expectedDocCount.put("2015-01-01T00:00:00.000Z", 3); expectedDocCount.put("2016-01-01T00:00:00.000Z", 1); expectedDocCount.put("2017-01-01T00:00:00.000Z", 1); - testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, - aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2011-01-01T00:00:00.000Z", 0); expectedDocCount.put("2014-01-01T00:00:00.000Z", 0); - testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME, + testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), result -> 
assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); } public void testSubAggregations() throws IOException { - testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME, + testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD) .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)), histogram -> { @@ -249,7 +245,7 @@ public void testAsSubAgg() throws IOException { expectedMax.put("2020-01-01T00:00:00.000Z", 2.0); expectedMax.put("2021-01-01T00:00:00.000Z", 3.0); assertThat(maxAsMap(ak1adh), equalTo(expectedMax)); - + StringTerms.Bucket b = terms.getBucketByKey("b"); StringTerms bk1 = b.getAggregations().get("k1"); StringTerms.Bucket bk1a = bk1.getBucketByKey("a"); @@ -391,7 +387,7 @@ public void testNoDocs() throws IOException { assertFalse(AggregationInspectionHelper.hasValue(histogram)); } ); - testSearchAndReduceCase(DEFAULT_QUERY, dates, aggregation, + testSearchCase(DEFAULT_QUERY, dates, aggregation, histogram -> { assertEquals(0, histogram.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue(histogram)); @@ -431,7 +427,7 @@ public void testIntervalYear() throws IOException { final long start = LocalDate.of(2015, 1, 1).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); final long end = LocalDate.of(2017, 12, 31).atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli(); final Query rangeQuery = LongPoint.newRangeQuery(INSTANT_FIELD, start, end); - testBothCases(rangeQuery, DATES_WITH_TIME, + testSearchCase(rangeQuery, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { final ZonedDateTime startDate = ZonedDateTime.of(2015, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); @@ -460,7 +456,7 @@ public void testIntervalMonth() throws IOException { expectedDocCount.put("2017-01-01T00:00:00.000Z", 1); expectedDocCount.put("2017-02-01T00:00:00.000Z", 2); expectedDocCount.put("2017-03-01T00:00:00.000Z", 3); - testBothCases(DEFAULT_QUERY, datesForMonthInterval, + testSearchCase(DEFAULT_QUERY, datesForMonthInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -490,12 +486,8 @@ public void testIntervalDay() throws IOException { expectedDocCount.put("2017-02-02T00:00:00.000Z", 2); expectedDocCount.put("2017-02-03T00:00:00.000Z", 3); expectedDocCount.put("2017-02-05T00:00:00.000Z", 1); - testSearchCase(DEFAULT_QUERY, datesForDayInterval, - aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2017-02-04T00:00:00.000Z", 0); - testSearchAndReduceCase(DEFAULT_QUERY, datesForDayInterval, + testSearchCase(DEFAULT_QUERY, datesForDayInterval, aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -515,12 +507,8 @@ public void testIntervalDayWithTZ() throws IOException { expectedDocCount.put("2017-02-01T00:00:00.000-01:00", 2); expectedDocCount.put("2017-02-02T00:00:00.000-01:00", 3); expectedDocCount.put("2017-02-04T00:00:00.000-01:00", 1); - testSearchCase(DEFAULT_QUERY, datesForDayInterval, - aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2017-02-03T00:00:00.000-01:00", 0); - testSearchAndReduceCase(DEFAULT_QUERY, 
datesForDayInterval, + testSearchCase(DEFAULT_QUERY, datesForDayInterval, aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -546,13 +534,9 @@ public void testIntervalHour() throws IOException { expectedDocCount.put("2017-02-01T15:00:00.000Z", 1); expectedDocCount.put("2017-02-01T15:00:00.000Z", 1); expectedDocCount.put("2017-02-01T16:00:00.000Z", 3); - testSearchCase(DEFAULT_QUERY, datesForHourInterval, - aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2017-02-01T11:00:00.000Z", 0); expectedDocCount.put("2017-02-01T12:00:00.000Z", 0); - testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -560,7 +544,7 @@ public void testIntervalHour() throws IOException { expectedDocCount.put("2017-02-01T09:00:00.000Z", 3); expectedDocCount.put("2017-02-01T12:00:00.000Z", 3); expectedDocCount.put("2017-02-01T15:00:00.000Z", 4); - testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -585,13 +569,9 @@ public void testIntervalHourWithTZ() throws IOException { expectedDocCount.put("2017-02-01T13:00:00.000-01:00", 2); expectedDocCount.put("2017-02-01T14:00:00.000-01:00", 1); expectedDocCount.put("2017-02-01T15:00:00.000-01:00", 3); - testSearchCase(DEFAULT_QUERY, datesForHourInterval, - aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2017-02-01T10:00:00.000-01:00", 0); expectedDocCount.put("2017-02-01T11:00:00.000-01:00", 0); - testSearchAndReduceCase(DEFAULT_QUERY, datesForHourInterval, + testSearchCase(DEFAULT_QUERY, datesForHourInterval, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(ZoneOffset.ofHours(-1)), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); @@ -612,7 +592,7 @@ public void testRandomSecondIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 30); bucketsToExpectedDocCountMap.put(3, 60); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -641,7 +621,7 @@ public void testRandomMinuteIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 30); bucketsToExpectedDocCountMap.put(3, 60); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -669,7 +649,7 @@ public void testRandomHourIntervals() throws IOException { bucketsToExpectedDocCountMap.put(12, 12); bucketsToExpectedDocCountMap.put(3, 24); 
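// Each entry pairs a requested bucket count (the setNumBuckets target below) with the doc count expected
// in every bucket once the aggregator rounds up to an interval that satisfies that target; randomFrom
// exercises one such pairing per test run.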
final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -693,7 +673,7 @@ public void testRandomDayIntervals() throws IOException { } final int randomChoice = randomIntBetween(1, 3); if (randomChoice == 1) { - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(length).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -704,7 +684,7 @@ public void testRandomDayIntervals() throws IOException { assertEquals(1, bucket.getDocCount()); }); } else if (randomChoice == 2) { - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -716,7 +696,7 @@ public void testRandomDayIntervals() throws IOException { assertEquals(expectedDocCount, bucket.getDocCount()); }); } else if (randomChoice == 3) { - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -742,7 +722,7 @@ public void testRandomMonthIntervals() throws IOException { bucketsToExpectedDocCountMap.put(30, 3); bucketsToExpectedDocCountMap.put(6, 12); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -772,7 +752,7 @@ public void testRandomYearIntervals() throws IOException { bucketsToExpectedDocCountMap.put(10, 50); bucketsToExpectedDocCountMap.put(5, 100); final Map.Entry randomEntry = randomFrom(bucketsToExpectedDocCountMap.entrySet()); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, + testSearchCase(DEFAULT_QUERY, dataset, aggregation -> aggregation.setNumBuckets(randomEntry.getKey()).field(DATE_FIELD), histogram -> { final List buckets = histogram.getBuckets(); @@ -797,29 +777,20 @@ public void testIntervalMinute() throws IOException { skeletonDocCount.put("2017-02-01T09:02:00.000Z", 2); skeletonDocCount.put("2017-02-01T09:15:00.000Z", 1); skeletonDocCount.put("2017-02-01T09:16:00.000Z", 2); - testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, - aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(skeletonDocCount)) - ); Map fullDocCount = new TreeMap<>(); fullDocCount.put("2017-02-01T09:02:00.000Z", 2); fullDocCount.put("2017-02-01T09:07:00.000Z", 0); fullDocCount.put("2017-02-01T09:12:00.000Z", 3); - testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval, + testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount)) ); - - testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, - aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(skeletonDocCount)) - ); fullDocCount.clear(); fullDocCount.putAll(skeletonDocCount); for (int 
minute = 3; minute < 15; minute++) { - fullDocCount.put(String.format(Locale.ROOT, "2017-02-01T09:%02d:00.000Z", minute), 0); + fullDocCount.put(String.format(Locale.ROOT, "2017-02-01T09:%02d:00.000Z", minute), 0); } - testSearchAndReduceCase(DEFAULT_QUERY, datesForMinuteInterval, + testSearchCase(DEFAULT_QUERY, datesForMinuteInterval, aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(fullDocCount)) ); @@ -837,22 +808,18 @@ public void testIntervalSecond() throws IOException { expectedDocCount.put("2017-02-01T00:00:05.000Z", 1); expectedDocCount.put("2017-02-01T00:00:07.000Z", 2); expectedDocCount.put("2017-02-01T00:00:11.000Z", 3); - testSearchCase(DEFAULT_QUERY, datesForSecondInterval, - aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), - result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) - ); expectedDocCount.put("2017-02-01T00:00:06.000Z", 0); expectedDocCount.put("2017-02-01T00:00:08.000Z", 0); expectedDocCount.put("2017-02-01T00:00:09.000Z", 0); expectedDocCount.put("2017-02-01T00:00:10.000Z", 0); - testSearchAndReduceCase(DEFAULT_QUERY, datesForSecondInterval, + testSearchCase(DEFAULT_QUERY, datesForSecondInterval, aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), result -> assertThat(bucketCountsAsMap(result), equalTo(expectedDocCount)) ); } public void testWithPipelineReductions() throws IOException { - testSearchAndReduceCase(DEFAULT_QUERY, DATES_WITH_TIME, + testSearchCase(DEFAULT_QUERY, DATES_WITH_TIME, aggregation -> aggregation.setNumBuckets(1).field(DATE_FIELD) .subAggregation(AggregationBuilders.histogram("histo").field(NUMERIC_FIELD).interval(1) .subAggregation(AggregationBuilders.max("max").field(NUMERIC_FIELD)) @@ -880,25 +847,6 @@ public void testWithPipelineReductions() throws IOException { }); } - private void testSearchCase(final Query query, final List dataset, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(false, query, dataset, configure, verify); - } - - private void testSearchAndReduceCase(final Query query, final List dataset, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(true, query, dataset, configure, verify); - } - - private void testBothCases(final Query query, final List dataset, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(false, query, dataset, configure, verify); - executeTestCase(true, query, dataset, configure, verify); - } - @Override protected IndexSettings createIndexSettings() { final Settings nodeSettings = Settings.builder() @@ -913,7 +861,7 @@ protected IndexSettings createIndexSettings() { ); } - private void executeTestCase(final boolean reduced, final Query query, final List dataset, + private void testSearchCase(final Query query, final List dataset, final Consumer configure, final Consumer verify) throws IOException { try (Directory directory = newDirectory()) { @@ -936,12 +884,8 @@ private void executeTestCase(final boolean reduced, final Query query, final Lis MappedFieldType numericFieldType = new NumberFieldMapper.NumberFieldType(NUMERIC_FIELD, NumberFieldMapper.NumberType.LONG); - final InternalAutoDateHistogram histogram; - if (reduced) { - histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType); - } else { - histogram = search(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType); - } 
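// The reduced/unreduced fork above collapses into a single call: searchAndReduce collects with the
// aggregator and then runs the final reduce, so every assertion now sees the same shape of result
// that a real search response would produce.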
+ final InternalAutoDateHistogram histogram = + searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType, instantFieldType, numericFieldType); verify.accept(histogram); } } @@ -951,10 +895,6 @@ private void indexSampleData(List dataset, RandomIndexWriter inde final Document document = new Document(); int i = 0; for (final ZonedDateTime date : dataset) { - if (frequently()) { - indexWriter.commit(); - } - final long instant = date.toInstant().toEpochMilli(); document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); document.add(new LongPoint(INSTANT_FIELD, instant)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 4d84a854fc57f..d297d9fabc613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -67,7 +67,7 @@ public class DateHistogramAggregatorTests extends DateHistogramAggregatorTestCas "2017-12-12T22:55:46"); public void testMatchNoDocsDeprecatedInterval() throws IOException { - testBothCases(new MatchNoDocsQuery(), DATASET, + testSearchCase(new MatchNoDocsQuery(), DATASET, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> { assertEquals(0, histogram.getBuckets().size()); @@ -78,11 +78,11 @@ public void testMatchNoDocsDeprecatedInterval() throws IOException { } public void testMatchNoDocs() throws IOException { - testBothCases(new MatchNoDocsQuery(), DATASET, + testSearchCase(new MatchNoDocsQuery(), DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> assertEquals(0, histogram.getBuckets().size()), false ); - testBothCases(new MatchNoDocsQuery(), DATASET, + testSearchCase(new MatchNoDocsQuery(), DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE), histogram -> assertEquals(0, histogram.getBuckets().size()), false ); @@ -92,20 +92,13 @@ public void testMatchAllDocsDeprecatedInterval() throws IOException { Query query = new MatchAllDocsQuery(); testSearchCase(query, DATASET, - aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), - histogram -> { - assertEquals(6, histogram.getBuckets().size()); - assertTrue(AggregationInspectionHelper.hasValue(histogram)); - }, false - ); - testSearchAndReduceCase(query, DATASET, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> { assertEquals(8, histogram.getBuckets().size()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); }, false ); - testBothCases(query, DATASET, + testSearchCase(query, DATASET, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L), histogram -> { assertEquals(6, histogram.getBuckets().size()); @@ -122,34 +115,26 @@ public void testMatchAllDocs() throws IOException { for (int i = 0; i < 1000; i++) { foo.add(DATASET.get(randomIntBetween(0, DATASET.size()-1))); } - testSearchAndReduceCase(query, foo, + testSearchCase(query, foo, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")) .field(AGGREGABLE_DATE).order(BucketOrder.count(false)), histogram -> 
assertEquals(8, histogram.getBuckets().size()), false ); testSearchCase(query, DATASET, - aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), - histogram -> assertEquals(6, histogram.getBuckets().size()), false - ); - testSearchAndReduceCase(query, DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> assertEquals(8, histogram.getBuckets().size()), false ); - testBothCases(query, DATASET, + testSearchCase(query, DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE).minDocCount(1L), histogram -> assertEquals(6, histogram.getBuckets().size()), false ); testSearchCase(query, DATASET, - aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE), - histogram -> assertEquals(6, histogram.getBuckets().size()), false - ); - testSearchAndReduceCase(query, DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE), histogram -> assertEquals(8, histogram.getBuckets().size()), false ); - testBothCases(query, DATASET, + testSearchCase(query, DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field(AGGREGABLE_DATE).minDocCount(1L), histogram -> assertEquals(6, histogram.getBuckets().size()), false ); @@ -206,7 +191,7 @@ public void testNoDocsDeprecatedInterval() throws IOException { assertEquals(0, histogram.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue(histogram)); }, false); - testSearchAndReduceCase(query, dates, aggregation, histogram -> { + testSearchCase(query, dates, aggregation, histogram -> { assertEquals(0, histogram.getBuckets().size()); assertFalse(AggregationInspectionHelper.hasValue(histogram)); }, false); @@ -221,7 +206,7 @@ public void testNoDocs() throws IOException { testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false ); - testSearchAndReduceCase(query, dates, aggregation, + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false ); @@ -230,13 +215,13 @@ public void testNoDocs() throws IOException { testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false ); - testSearchAndReduceCase(query, dates, aggregation, + testSearchCase(query, dates, aggregation, histogram -> assertEquals(0, histogram.getBuckets().size()), false ); } public void testAggregateWrongFieldDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), DATASET, + testSearchCase(new MatchAllDocsQuery(), DATASET, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field("wrong_field"), histogram -> { assertEquals(0, histogram.getBuckets().size()); @@ -247,18 +232,18 @@ public void testAggregateWrongFieldDeprecated() throws IOException { } public void testAggregateWrongField() throws IOException { - testBothCases(new MatchAllDocsQuery(), DATASET, + testSearchCase(new MatchAllDocsQuery(), DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field("wrong_field"), histogram -> assertEquals(0, histogram.getBuckets().size()), false ); - testBothCases(new MatchAllDocsQuery(), DATASET, + testSearchCase(new MatchAllDocsQuery(), DATASET, aggregation -> aggregation.fixedInterval(new DateHistogramInterval("365d")).field("wrong_field"), histogram -> assertEquals(0, histogram.getBuckets().size()), false ); } 
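// A minimal sketch of the pattern the converted tests in this file follow (the names and values are the
// ones used above; minDocCount is the knob that removes the empty buckets a full reduce materializes):
//
//     testSearchCase(new MatchAllDocsQuery(), DATASET,
//         aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR)
//             .field(AGGREGABLE_DATE)
//             .minDocCount(1L), // without this, reduce pads the range with empty buckets (8 instead of 6)
//         histogram -> assertEquals(6, histogram.getBuckets().size()), false
//     );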
public void testIntervalYearDeprecated() throws IOException { - testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET, + testSearchCase(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> { List buckets = histogram.getBuckets(); @@ -281,7 +266,7 @@ public void testIntervalYearDeprecated() throws IOException { } public void testIntervalYear() throws IOException { - testBothCases(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET, + testSearchCase(LongPoint.newRangeQuery(SEARCHABLE_DATE, asLong("2015-01-01"), asLong("2017-12-31")), DATASET, aggregation -> aggregation.calendarInterval(DateHistogramInterval.YEAR).field(AGGREGABLE_DATE), histogram -> { List buckets = histogram.getBuckets(); @@ -303,7 +288,7 @@ public void testIntervalYear() throws IOException { } public void testIntervalMonthDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE), histogram -> { @@ -327,7 +312,7 @@ public void testIntervalMonthDeprecated() throws IOException { } public void testIntervalMonth() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), aggregation -> aggregation.calendarInterval(DateHistogramInterval.MONTH).field(AGGREGABLE_DATE), histogram -> { @@ -350,7 +335,7 @@ public void testIntervalMonth() throws IOException { } public void testIntervalDayDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", "2017-02-02", @@ -386,7 +371,7 @@ public void testIntervalDayDeprecated() throws IOException { } public void testIntervalDay() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", "2017-02-02", @@ -418,7 +403,7 @@ public void testIntervalDay() throws IOException { assertEquals(1, bucket.getDocCount()); }, false ); - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01", "2017-02-02", @@ -453,7 +438,7 @@ public void testIntervalDay() throws IOException { } public void testIntervalHourDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", @@ -500,7 +485,7 @@ public void testIntervalHourDeprecated() throws IOException { } public void testIntervalHour() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", @@ -543,7 +528,7 @@ public void testIntervalHour() throws IOException { assertEquals(3, bucket.getDocCount()); }, false ); - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:00.000Z", "2017-02-01T09:35:00.000Z", @@ -589,7 +574,7 @@ public void testIntervalHour() throws IOException { } public void 
testIntervalMinuteDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", @@ -619,7 +604,7 @@ public void testIntervalMinuteDeprecated() throws IOException { } public void testIntervalMinute() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", @@ -645,7 +630,7 @@ public void testIntervalMinute() throws IOException { assertEquals(2, bucket.getDocCount()); }, false ); - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T09:02:35.000Z", "2017-02-01T09:02:59.000Z", @@ -674,7 +659,7 @@ public void testIntervalMinute() throws IOException { } public void testIntervalSecondDeprecated() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", @@ -705,7 +690,7 @@ public void testIntervalSecondDeprecated() throws IOException { } public void testIntervalSecond() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", @@ -732,7 +717,7 @@ public void testIntervalSecond() throws IOException { assertEquals(3, bucket.getDocCount()); }, false ); - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015Z", "2017-02-01T00:00:11.299Z", @@ -762,7 +747,7 @@ public void testIntervalSecond() throws IOException { } public void testNanosIntervalSecond() throws IOException { - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015298384Z", "2017-02-01T00:00:11.299954583Z", @@ -789,7 +774,7 @@ public void testNanosIntervalSecond() throws IOException { assertEquals(3, bucket.getDocCount()); }, true ); - testBothCases(new MatchAllDocsQuery(), + testSearchCase(new MatchAllDocsQuery(), Arrays.asList( "2017-02-01T00:00:05.015298384Z", "2017-02-01T00:00:11.299954583Z", @@ -829,7 +814,7 @@ public void testMinDocCountDeprecated() throws IOException { ); // 5 sec interval with minDocCount = 0 - testSearchAndReduceCase(query, timestamps, + testSearchCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L), histogram -> { List buckets = histogram.getBuckets(); @@ -854,7 +839,7 @@ public void testMinDocCountDeprecated() throws IOException { ); // 5 sec interval with minDocCount = 3 - testSearchAndReduceCase(query, timestamps, + testSearchCase(query, timestamps, aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L), histogram -> { List buckets = histogram.getBuckets(); @@ -879,7 +864,7 @@ public void testMinDocCount() throws IOException { ); // 5 sec interval with minDocCount = 0 - testSearchAndReduceCase(query, timestamps, + testSearchCase(query, timestamps, aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(0L), histogram -> { List buckets = histogram.getBuckets(); @@ -904,7 +889,7 @@ public void testMinDocCount() throws IOException { ); // 5 sec interval with minDocCount = 3 - testSearchAndReduceCase(query, timestamps, + testSearchCase(query, 
timestamps, aggregation -> aggregation.fixedInterval(DateHistogramInterval.seconds(5)).field(AGGREGABLE_DATE).minDocCount(3L), histogram -> { List buckets = histogram.getBuckets(); @@ -1175,43 +1160,6 @@ private void testSearchCase(Query query, List dataset, Consumer configure, Consumer verify, int maxBucket, boolean useNanosecondResolution) throws IOException { - executeTestCase(false, query, dataset, configure, verify, maxBucket, useNanosecondResolution); - } - - private void testSearchAndReduceCase(Query query, List dataset, - Consumer configure, - Consumer verify, boolean useNanosecondResolution) throws IOException { - testSearchAndReduceCase(query, dataset, configure, verify, 1000, useNanosecondResolution); - } - - private void testSearchAndReduceCase(Query query, List dataset, - Consumer configure, - Consumer verify, - int maxBucket, boolean useNanosecondResolution) throws IOException { - executeTestCase(true, query, dataset, configure, verify, maxBucket, useNanosecondResolution); - } - - private void testBothCases(Query query, List dataset, - Consumer configure, - Consumer verify, boolean useNanosecondResolution) throws IOException { - testBothCases(query, dataset, configure, verify, 10000, useNanosecondResolution); - } - - private void testBothCases(Query query, List dataset, - Consumer configure, - Consumer verify, - int maxBucket, boolean useNanosecondResolution) throws IOException { - testSearchCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution); - testSearchAndReduceCase(query, dataset, configure, verify, maxBucket, useNanosecondResolution); - } - - private void executeTestCase(boolean reduced, - Query query, - List dataset, - Consumer configure, - Consumer verify, - int maxBucket, boolean useNanosecondResolution) throws IOException { - boolean aggregableDateIsSearchable = randomBoolean(); DateFieldMapper.DateFieldType fieldType = aggregableDateFieldType(useNanosecondResolution, aggregableDateIsSearchable); @@ -1220,10 +1168,6 @@ private void executeTestCase(boolean reduced, try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { Document document = new Document(); for (String date : dataset) { - if (frequently()) { - indexWriter.commit(); - } - long instant = asLong(date, fieldType); document.add(new SortedNumericDocValuesField(AGGREGABLE_DATE, instant)); if (aggregableDateIsSearchable) { @@ -1243,12 +1187,7 @@ private void executeTestCase(boolean reduced, configure.accept(aggregationBuilder); } - InternalDateHistogram histogram; - if (reduced) { - histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, null, fieldType); - } else { - histogram = search(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); - } + InternalDateHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); verify.accept(histogram); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java index b41aa2896d3be..81426688c3e94 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/NumericHistogramAggregatorTests.java @@ -55,7 +55,7 @@ public class NumericHistogramAggregatorTests extends AggregatorTestCase { public void testLongs() throws 
Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {7, 3, -10, -6, 5, 50}) { + for (long value : new long[] {7, 3, -10, -6, 5, 15}) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", value)); w.addDocument(doc); @@ -66,16 +66,20 @@ public void testLongs() throws Exception { .interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); - assertEquals(4, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); + assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); - assertEquals(0d, histogram.getBuckets().get(1).getKey()); - assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - assertEquals(5d, histogram.getBuckets().get(2).getKey()); - assertEquals(2, histogram.getBuckets().get(2).getDocCount()); - assertEquals(50d, histogram.getBuckets().get(3).getKey()); - assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertEquals(-5d, histogram.getBuckets().get(1).getKey()); + assertEquals(0, histogram.getBuckets().get(1).getDocCount()); + assertEquals(0d, histogram.getBuckets().get(2).getKey()); + assertEquals(1, histogram.getBuckets().get(2).getDocCount()); + assertEquals(5d, histogram.getBuckets().get(3).getKey()); + assertEquals(2, histogram.getBuckets().get(3).getDocCount()); + assertEquals(10d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(5).getKey()); + assertEquals(1, histogram.getBuckets().get(5).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -84,7 +88,7 @@ public void testLongs() throws Exception { public void testDoubles() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 50.1}) { + for (double value : new double[] {9.3, 3.2, -10, -6.5, 5.3, 15.1}) { Document doc = new Document(); doc.add(new SortedNumericDocValuesField("field", NumericUtils.doubleToSortableLong(value))); w.addDocument(doc); @@ -95,16 +99,20 @@ public void testDoubles() throws Exception { .interval(5); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); - assertEquals(4, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); + assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); - assertEquals(0d, histogram.getBuckets().get(1).getKey()); - assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - assertEquals(5d, histogram.getBuckets().get(2).getKey()); - assertEquals(2, histogram.getBuckets().get(2).getDocCount()); - assertEquals(50d, histogram.getBuckets().get(3).getKey()); - assertEquals(1, histogram.getBuckets().get(3).getDocCount()); +
assertEquals(-5d, histogram.getBuckets().get(1).getKey()); + assertEquals(0, histogram.getBuckets().get(1).getDocCount()); + assertEquals(0d, histogram.getBuckets().get(2).getKey()); + assertEquals(1, histogram.getBuckets().get(2).getDocCount()); + assertEquals(5d, histogram.getBuckets().get(3).getKey()); + assertEquals(2, histogram.getBuckets().get(3).getDocCount()); + assertEquals(10d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(5).getKey()); + assertEquals(1, histogram.getBuckets().get(5).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -130,10 +139,6 @@ public void testDates() throws Exception { RandomIndexWriter indexWriter = new RandomIndexWriter(random(), dir)) { Document document = new Document(); for (String date : dataset) { - if (frequently()) { - indexWriter.commit(); - } - long instant = fieldType.parse(date); document.add(new SortedNumericDocValuesField(fieldName, instant)); indexWriter.addDocument(document); @@ -145,7 +150,7 @@ public void testDates() throws Exception { .interval(1000 * 60 * 60 * 24); try (IndexReader reader = indexWriter.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -165,16 +170,20 @@ public void testIrrationalInterval() throws Exception { .interval(Math.PI); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); - assertEquals(4, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); + assertEquals(6, histogram.getBuckets().size()); assertEquals(-4 * Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); assertEquals(-3 * Math.PI, histogram.getBuckets().get(1).getKey()); assertEquals(1, histogram.getBuckets().get(1).getDocCount()); - assertEquals(0d, histogram.getBuckets().get(2).getKey()); - assertEquals(2, histogram.getBuckets().get(2).getDocCount()); - assertEquals(Math.PI, histogram.getBuckets().get(3).getKey()); - assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertEquals(-2 * Math.PI, histogram.getBuckets().get(2).getKey()); + assertEquals(0, histogram.getBuckets().get(2).getDocCount()); + assertEquals(-Math.PI, histogram.getBuckets().get(3).getKey()); + assertEquals(0, histogram.getBuckets().get(3).getDocCount()); + assertEquals(0d, histogram.getBuckets().get(4).getKey()); + assertEquals(2, histogram.getBuckets().get(4).getDocCount()); + assertEquals(Math.PI, histogram.getBuckets().get(5).getKey()); + assertEquals(1, histogram.getBuckets().get(5).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -209,7 +218,7 @@ public void testMinDocCount() throws Exception { public void testMissing() throws Exception { try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { - for (long value : new long[] {7, 3, -10, -6, 5, 50}) { + for (long value : new long[] {7, 3, -10, -6, 5, 15}) { Document doc = new Document(); doc.add(new 
SortedNumericDocValuesField("field", value)); w.addDocument(doc); @@ -222,16 +231,20 @@ public void testMissing() throws Exception { .missing(2d); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); - assertEquals(4, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, longField("field")); + assertEquals(6, histogram.getBuckets().size()); assertEquals(-10d, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); - assertEquals(0d, histogram.getBuckets().get(1).getKey()); - assertEquals(7, histogram.getBuckets().get(1).getDocCount()); - assertEquals(5d, histogram.getBuckets().get(2).getKey()); - assertEquals(2, histogram.getBuckets().get(2).getDocCount()); - assertEquals(50d, histogram.getBuckets().get(3).getKey()); - assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertEquals(-5d, histogram.getBuckets().get(1).getKey()); + assertEquals(0, histogram.getBuckets().get(1).getDocCount()); + assertEquals(0d, histogram.getBuckets().get(2).getKey()); + assertEquals(7, histogram.getBuckets().get(2).getDocCount()); + assertEquals(5d, histogram.getBuckets().get(3).getKey()); + assertEquals(2, histogram.getBuckets().get(3).getDocCount()); + assertEquals(10d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(5).getKey()); + assertEquals(1, histogram.getBuckets().get(5).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -252,7 +265,7 @@ public void testMissingUnmappedField() throws Exception { MappedFieldType type = null; try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, type); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, type); assertEquals(1, histogram.getBuckets().size()); @@ -280,7 +293,7 @@ public void testMissingUnmappedFieldBadType() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); Throwable t = expectThrows(IllegalArgumentException.class, () -> { - search(searcher, new MatchAllDocsQuery(), aggBuilder, type); + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, type); }); // This throws a number format exception (which is a subclass of IllegalArgumentException) and might be ok? 
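// NumberFormatException extends IllegalArgumentException, so the expectThrows above
// still passes under searchAndReduce; the assertion below only pins the unparsable
// missing value into the error message.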
assertThat(t.getMessage(), containsString(missingValue)); @@ -304,7 +317,7 @@ public void testIncorrectFieldType() throws Exception { IndexSearcher searcher = new IndexSearcher(reader); expectThrows(IllegalArgumentException.class, () -> { - search(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field")); + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, keywordField("field")); }); } } @@ -326,14 +339,16 @@ public void testOffset() throws Exception { .offset(Math.PI); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); - assertEquals(3, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); + assertEquals(4, histogram.getBuckets().size()); assertEquals(-10 + Math.PI, histogram.getBuckets().get(0).getKey()); assertEquals(2, histogram.getBuckets().get(0).getDocCount()); - assertEquals(Math.PI, histogram.getBuckets().get(1).getKey()); - assertEquals(2, histogram.getBuckets().get(1).getDocCount()); - assertEquals(5 + Math.PI, histogram.getBuckets().get(2).getKey()); - assertEquals(1, histogram.getBuckets().get(2).getDocCount()); + assertEquals(-5 + Math.PI, histogram.getBuckets().get(1).getKey()); + assertEquals(0, histogram.getBuckets().get(1).getDocCount()); + assertEquals(Math.PI, histogram.getBuckets().get(2).getKey()); + assertEquals(2, histogram.getBuckets().get(2).getDocCount()); + assertEquals(5 + Math.PI, histogram.getBuckets().get(3).getKey()); + assertEquals(1, histogram.getBuckets().get(3).getDocCount()); assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } @@ -358,18 +373,21 @@ public void testRandomOffset() throws Exception { .offset(offset); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); - assertEquals(3, histogram.getBuckets().size()); + InternalHistogram histogram = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, doubleField("field")); + assertEquals(4, histogram.getBuckets().size()); assertEquals(-10 + expectedOffset, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); - assertEquals(expectedOffset, histogram.getBuckets().get(1).getKey()); - assertEquals(1, histogram.getBuckets().get(1).getDocCount()); + assertEquals(-5 + expectedOffset, histogram.getBuckets().get(1).getKey()); + assertEquals(0, histogram.getBuckets().get(1).getDocCount()); - assertEquals(5 + expectedOffset, histogram.getBuckets().get(2).getKey()); + assertEquals(expectedOffset, histogram.getBuckets().get(2).getKey()); assertEquals(1, histogram.getBuckets().get(2).getDocCount()); + assertEquals(5 + expectedOffset, histogram.getBuckets().get(3).getKey()); + assertEquals(1, histogram.getBuckets().get(3).getDocCount()); + assertTrue(AggregationInspectionHelper.hasValue(histogram)); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java index 990c8840c3f9e..312ca43c7286e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/RangeHistogramAggregatorTests.java @@ -57,7 +57,7 @@ public void testDoubles() throws Exception { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 40, 45 + new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 20, 25 }) { Document doc = new Document(); BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range)); @@ -71,8 +71,9 @@ public void testDoubles() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); - assertEquals(6, histogram.getBuckets().size()); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -86,11 +87,14 @@ public void testDoubles() throws Exception { assertEquals(10d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); - assertEquals(40d, histogram.getBuckets().get(4).getKey()); - assertEquals(1, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); - assertEquals(45d, histogram.getBuckets().get(5).getKey()); + assertEquals(20d, histogram.getBuckets().get(5).getKey()); assertEquals(1, histogram.getBuckets().get(5).getDocCount()); + + assertEquals(25d, histogram.getBuckets().get(6).getKey()); + assertEquals(1, histogram.getBuckets().get(6).getDocCount()); } } } @@ -103,7 +107,7 @@ public void testLongs() throws Exception { new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true), // bucket 40, 45 + new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true), // bucket 20, 25 }) { Document doc = new Document(); BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range)); @@ -117,8 +121,9 @@ public void testLongs() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); - assertEquals(6, histogram.getBuckets().size()); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -132,11 +137,14 @@ public void testLongs() throws Exception { assertEquals(10d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); - assertEquals(40d, histogram.getBuckets().get(4).getKey()); - assertEquals(1, 
histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); - assertEquals(45d, histogram.getBuckets().get(5).getKey()); + assertEquals(20d, histogram.getBuckets().get(5).getKey()); assertEquals(1, histogram.getBuckets().get(5).getDocCount()); + + assertEquals(25d, histogram.getBuckets().get(6).getKey()); + assertEquals(1, histogram.getBuckets().get(6).getDocCount()); } } } @@ -150,7 +158,7 @@ public void testMultipleRanges() throws Exception { new RangeFieldMapper.Range(rangeType, 1L, 5L, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3L, 4L, true, true), // bucket -5, 0 new RangeFieldMapper.Range(rangeType, 4L, 13L, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 42L, 49L, true, true) // bucket 40, 45 + new RangeFieldMapper.Range(rangeType, 22L, 29L, true, true) // bucket 20, 25 )); doc.add(new BinaryDocValuesField("field", encodedRange)); w.addDocument(doc); @@ -161,8 +169,9 @@ public void testMultipleRanges() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); - assertEquals(6, histogram.getBuckets().size()); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -176,11 +185,14 @@ public void testMultipleRanges() throws Exception { assertEquals(10d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); - assertEquals(40d, histogram.getBuckets().get(4).getKey()); - assertEquals(1, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); - assertEquals(45d, histogram.getBuckets().get(5).getKey()); + assertEquals(20d, histogram.getBuckets().get(5).getKey()); assertEquals(1, histogram.getBuckets().get(5).getDocCount()); + + assertEquals(25d, histogram.getBuckets().get(6).getKey()); + assertEquals(1, histogram.getBuckets().get(6).getDocCount()); } } @@ -206,7 +218,8 @@ public void testMultipleRangesLotsOfOverlap() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); assertEquals(3, histogram.getBuckets().size()); assertEquals(0d, histogram.getBuckets().get(0).getKey()); @@ -243,7 +256,8 @@ public void testLongsIrrationalInterval() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); assertEquals(6, histogram.getBuckets().size()); assertEquals(-1 * Math.PI, histogram.getBuckets().get(0).getKey()); @@ -315,7 +329,7 @@ public void testOffset()
throws Exception { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket -1, 4 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -6 -1 4 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 4, 9 - new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 39, 44, 49 + new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 19, 24, 29 }) { Document doc = new Document(); BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range)); @@ -330,8 +344,9 @@ public void testOffset() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); - //assertEquals(7, histogram.getBuckets().size()); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + assertEquals(8, histogram.getBuckets().size()); assertEquals(-6d, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -345,14 +360,17 @@ public void testOffset() throws Exception { assertEquals(9d, histogram.getBuckets().get(3).getKey()); assertEquals(1, histogram.getBuckets().get(3).getDocCount()); - assertEquals(39d, histogram.getBuckets().get(4).getKey()); - assertEquals(1, histogram.getBuckets().get(4).getDocCount()); + assertEquals(14d, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); - assertEquals(44d, histogram.getBuckets().get(5).getKey()); + assertEquals(19d, histogram.getBuckets().get(5).getKey()); assertEquals(1, histogram.getBuckets().get(5).getDocCount()); - assertEquals(49d, histogram.getBuckets().get(6).getKey()); + assertEquals(24d, histogram.getBuckets().get(6).getKey()); assertEquals(1, histogram.getBuckets().get(6).getDocCount()); + + assertEquals(29d, histogram.getBuckets().get(7).getKey()); + assertEquals(1, histogram.getBuckets().get(7).getDocCount()); } } } @@ -365,7 +383,7 @@ public void testOffsetGtInterval() throws Exception { new RangeFieldMapper.Range(rangeType, 1.0D, 5.0D, true, true), // bucket 0 5 new RangeFieldMapper.Range(rangeType, -3.1, 4.2, true, true), // bucket -5, 0 new RangeFieldMapper.Range(rangeType, 4.2, 13.3, true, true), // bucket 0, 5, 10 - new RangeFieldMapper.Range(rangeType, 42.5, 49.3, true, true), // bucket 40, 45 + new RangeFieldMapper.Range(rangeType, 22.5, 29.3, true, true), // bucket 20, 25 }) { Document doc = new Document(); BytesRef encodedRange = rangeType.encodeRanges(Collections.singleton(range)); @@ -386,8 +404,9 @@ public void testOffsetGtInterval() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalHistogram histogram = search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); - assertEquals(6, histogram.getBuckets().size()); + InternalHistogram histogram = + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType)); + assertEquals(7, histogram.getBuckets().size()); assertEquals(-5d + expectedOffset, histogram.getBuckets().get(0).getKey()); assertEquals(1, histogram.getBuckets().get(0).getDocCount()); @@ -401,11 +420,14 @@ public void testOffsetGtInterval() throws Exception { assertEquals(10d + expectedOffset, histogram.getBuckets().get(3).getKey()); assertEquals(1, 
histogram.getBuckets().get(3).getDocCount()); - assertEquals(40d + expectedOffset, histogram.getBuckets().get(4).getKey()); - assertEquals(1, histogram.getBuckets().get(4).getDocCount()); + assertEquals(15d + expectedOffset, histogram.getBuckets().get(4).getKey()); + assertEquals(0, histogram.getBuckets().get(4).getDocCount()); - assertEquals(45d + expectedOffset, histogram.getBuckets().get(5).getKey()); + assertEquals(20d + expectedOffset, histogram.getBuckets().get(5).getKey()); assertEquals(1, histogram.getBuckets().get(5).getDocCount()); + + assertEquals(25d + expectedOffset, histogram.getBuckets().get(6).getKey()); + assertEquals(1, histogram.getBuckets().get(6).getDocCount()); } } } @@ -429,7 +451,7 @@ public void testIpRangesUnsupported() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); Exception e = expectThrows(IllegalArgumentException.class, () -> - search(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType))); + searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, rangeField("field", rangeType))); assertThat(e.getMessage(), equalTo("Expected numeric range type but found non-numeric range [ip_range]")); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java index 8b24e92f6bb0e..c8fca52645a2c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/VariableWidthHistogramAggregatorTests.java @@ -65,7 +65,7 @@ public class VariableWidthHistogramAggregatorTests extends AggregatorTestCase { public void testNoDocs() throws Exception{ final List dataset = Arrays.asList(); - testBothCases(DEFAULT_QUERY, dataset, true, + testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(6).setInitialBuffer(4), histogram -> { final List buckets = histogram.getBuckets(); @@ -87,7 +87,7 @@ public void testMoreClustersThanDocs() throws Exception { expectedMins.put(-3d, -3d); expectedMins.put(10d, 10d); - testBothCases(DEFAULT_QUERY, dataset, true, + testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4), histogram -> { final List buckets = histogram.getBuckets(); @@ -164,7 +164,7 @@ public void testDoubles() throws Exception { expectedMaxesOnlySearch.put(8.8, 8.8); testSearchCase(DEFAULT_QUERY, dataset, false, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(6).setInitialBuffer(4), + aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4), histogram -> { final List buckets = histogram.getBuckets(); assertEquals(expectedCentroidsOnlySearch.size(), buckets.size()); @@ -198,7 +198,7 @@ public void testDoubles() throws Exception { expectedMaxesSearchReduce.put(5.3, 5.9); expectedMaxesSearchReduce.put(8.8, 8.8); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, false, + testSearchCase(DEFAULT_QUERY, dataset, false, aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(4).setShardSize(6).setInitialBuffer(4), histogram -> { final List buckets = histogram.getBuckets(); @@ -220,16 +220,12 @@ public void testNewBucketCreation() throws Exception { final List 
dataset = Arrays.asList(-1, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 40, 30, 25, 32, 36, 80, 50, 75, 60); double doubleError = 1d / 10000d; - // Search (no reduce) - - // Expected clusters: [ (-1), (1), (3), (5), (7), (9), (11), (13), (15), (17), - // (19), (25, 30, 32), (36, 40, 50), (60), (75, 80) ] - // Corresponding keys (centroids): [ -1, 1, 3, ..., 17, 19, 29, 42, 77.5] - // Note: New buckets are created for 30, 50, and 80 because they are distant from the other buckets - final List keys = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 29d, 42d, 60d, 77.5d); - final List mins = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 25d, 36d, 60d, 75d); - final List maxes = Arrays.asList(-1d, 1d, 3d, 5d, 7d, 9d, 11d, 13d, 15d, 17d, 19d, 32d, 50d, 60d, 80d); - final List docCounts = Arrays.asList(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 1, 2); + // Expected clusters: [ (-1, 1), (3, 5), (7, 9), (11, 13), (15, 17), + // (19), (25), (30), (32), (36), (40), (50), (60), (75), (80) ] + final List keys = Arrays.asList(0d, 4d, 8d, 12d, 16d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); + final List mins = Arrays.asList(-1d, 3d, 7d, 11d, 15d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); + final List maxes = Arrays.asList(1d, 5d, 9d, 13d, 17d, 19d, 25d, 30d, 32d, 36d, 40d, 50d, 60d, 75d, 80d); + final List docCounts = Arrays.asList(2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1); assert keys.size() == docCounts.size() && keys.size() == keys.size(); final Map expectedDocCountOnlySearch = new HashMap<>(); @@ -242,7 +238,7 @@ public void testNewBucketCreation() throws Exception { } testSearchCase(DEFAULT_QUERY, dataset, false, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(16).setInitialBuffer(12), + aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15), histogram -> { final List buckets = histogram.getBuckets(); assertEquals(expectedDocCountOnlySearch.size(), buckets.size()); @@ -267,7 +263,7 @@ public void testNewBucketCreation() throws Exception { } testSearchCase(DEFAULT_QUERY, dataset.stream().map(n -> Double.valueOf(n.doubleValue() * Long.MAX_VALUE)).collect(toList()), false, - aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(2).setShardSize(16).setInitialBuffer(12), + aggregation -> aggregation.field(NUMERIC_FIELD).setNumBuckets(15), histogram -> { final List buckets = histogram.getBuckets(); assertEquals(expectedDocCountOnlySearch.size(), buckets.size()); @@ -284,28 +280,22 @@ public void testNewBucketCreation() throws Exception { // There should not be more than `shard_size` documents on a node, even when very distant documents appear public void testNewBucketLimit() throws Exception{ - final List dataset = Arrays.asList(1,2,3,4,5, 10, 20, 50, 100, 5400, -900); + final List dataset = Arrays.asList(1, 2, 3, 4, 5, 10, 20, 50, 100, 5400, -900); double doubleError = 1d / 10000d; - // Expected clusters: [ (-900, 1, 2), (3, 4), (5), (10, 20, 50, 100, 5400)] - // Corresponding keys (centroids): [ -299, 3.5, 5, 1116] + // Expected clusters: [ (-900, 1, 2, 3, 4, 5), (10, 20, 50, 100, 5400)] + // Corresponding keys (centroids): [ -147.5, 1116] final Map expectedDocCount = new HashMap<>(); - expectedDocCount.put(-299d, 3); - expectedDocCount.put(3.5d, 2); - expectedDocCount.put(5d, 1); - expectedDocCount.put(1116d, 5); + expectedDocCount.put(-147.5d, 6); + expectedDocCount.put(1116.0d, 5); final Map expectedMins = new HashMap<>(); - expectedMins.put(-299d, -900d); - expectedMins.put(3.5d, 3d); - expectedMins.put(5d, 
5d); - expectedMins.put(1116d, 10d); + expectedMins.put(-147.5d, -900d); + expectedMins.put(1116.0d, 10d); final Map expectedMaxes = new HashMap<>(); - expectedMaxes.put(-299d, 2d); - expectedMaxes.put(3.5d, 4d); - expectedMaxes.put(5d, 5d); - expectedMaxes.put(1116d, 5400d); + expectedMaxes.put(-147.5d, 5d); + expectedMaxes.put(1116.0d, 5400d); testSearchCase(DEFAULT_QUERY, dataset, false, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(2).setShardSize(4).setInitialBuffer(5), @@ -325,7 +315,7 @@ public void testNewBucketLimit() throws Exception{ public void testSimpleSubAggregations() throws IOException{ final List dataset = Arrays.asList(5, 1, 9, 2, 8); - testSearchAndReduceCase(DEFAULT_QUERY, dataset, false, + testSearchCase(DEFAULT_QUERY, dataset, false, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(3) .setInitialBuffer(3) @@ -426,7 +416,7 @@ public void testMultipleSegments() throws IOException{ // To account for this case of a document switching clusters, we check that each cluster centroid is within // a certain range, rather than asserting exact values. - testSearchAndReduceCase(DEFAULT_QUERY, dataset, true, + testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD) .setNumBuckets(2) .setInitialBuffer(4) @@ -508,12 +498,11 @@ public void testSmallShardSize() throws Exception { public void testHugeShardSize() throws Exception { final List dataset = Arrays.asList(1, 2, 3); - testBothCases(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000), histogram -> { - assertThat( + testSearchCase(DEFAULT_QUERY, dataset, true, aggregation -> aggregation.field(NUMERIC_FIELD).setShardSize(1000000000), + histogram -> assertThat( histogram.getBuckets().stream().map(InternalVariableWidthHistogram.Bucket::getKey).collect(toList()), - equalTo(List.of(1.0, 2.0, 3.0)) - ); - }); + equalTo(List.of(1.0, 2.0, 3.0))) + ); } public void testSmallInitialBuffer() throws Exception { @@ -529,7 +518,7 @@ public void testSmallInitialBuffer() throws Exception { public void testOutOfOrderInitialBuffer() throws Exception { final List dataset = Arrays.asList(1, 2, 3); - testBothCases( + testSearchCase( DEFAULT_QUERY, dataset, true, @@ -553,26 +542,6 @@ public void testDefaultInitialBufferDependsOnNumBuckets() throws Exception { assertThat(new VariableWidthHistogramAggregationBuilder("test").setNumBuckets(3).getInitialBuffer(), equalTo(1500)); } - private void testSearchCase(final Query query, final List dataset, boolean multipleSegments, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(false, query, dataset, multipleSegments, configure, verify); - } - - - private void testSearchAndReduceCase(final Query query, final List dataset, boolean multipleSegments, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(true, query, dataset, multipleSegments, configure, verify); - } - - private void testBothCases(final Query query, final List dataset, boolean multipleSegments, - final Consumer configure, - final Consumer verify) throws IOException { - executeTestCase(true, query, dataset, multipleSegments, configure, verify); - executeTestCase(false, query, dataset, multipleSegments, configure, verify); - } - @Override protected IndexSettings createIndexSettings() { final Settings nodeSettings = Settings.builder() @@ -588,10 +557,9 @@ protected IndexSettings createIndexSettings() { ); } - private void executeTestCase(final boolean 
reduced, final Query query, - final List dataset, boolean multipleSegments, - final Consumer configure, - final Consumer verify) throws IOException { + private void testSearchCase(final Query query, final List dataset, boolean multipleSegments, + final Consumer configure, + final Consumer verify) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { indexSampleData(dataset, indexWriter, multipleSegments); @@ -619,12 +587,7 @@ private void executeTestCase(final boolean reduced, final Query query, - final InternalVariableWidthHistogram histogram; - if (reduced) { - histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); - } else { - histogram = search(indexSearcher, query, aggregationBuilder, fieldType); - } + final InternalVariableWidthHistogram histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); verify.accept(histogram); } } @@ -645,10 +608,6 @@ private void indexSampleData(List dataset, RandomIndexWriter indexWriter // Create multiple segments in the index final Document document = new Document(); for (final Number doc : dataset) { - if (frequently()) { - indexWriter.commit(); - } - long fieldVal = convertDocumentToSortableValue(doc); document.add(new SortedNumericDocValuesField(NUMERIC_FIELD, fieldVal)); indexWriter.addDocument(document); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java index 517f1bd07c8ae..d437e18788610 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/missing/MissingAggregatorTests.java @@ -393,17 +393,6 @@ private void testCase(Query query, CheckedConsumer writeIndex, Consumer verify, Collection fieldTypes) throws IOException { - testCaseWithReduce(query, builder, writeIndex, verify, fieldTypes, false); - testCaseWithReduce(query, builder, writeIndex, verify, fieldTypes, true); - } - - private void testCaseWithReduce(Query query, - MissingAggregationBuilder builder, - CheckedConsumer writeIndex, - Consumer verify, - Collection fieldTypes, - boolean reduced) throws IOException { - try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { writeIndex.accept(indexWriter); @@ -412,12 +401,7 @@ private void testCaseWithReduce(Query query, try (IndexReader indexReader = DirectoryReader.open(directory)) { final IndexSearcher indexSearcher = newSearcher(indexReader, true, true); final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]); - final InternalMissing missing; - if (reduced) { - missing = searchAndReduce(indexSearcher, query, builder, fieldTypesArray); - } else { - missing = search(indexSearcher, query, builder, fieldTypesArray); - } + final InternalMissing missing = searchAndReduce(indexSearcher, query, builder, fieldTypesArray); verify.accept(missing); } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index fde8b7e2e3518..078450445f546 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -151,7 +151,7 @@ public void testNoDocs() throws IOException { nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(NESTED_AGG, nested.getName()); @@ -196,7 +196,7 @@ public void testSingleNestingMax() throws IOException { nestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -248,7 +248,7 @@ public void testDoubleNestingMax() throws IOException { MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -303,7 +303,7 @@ public void testOrphanedDocs() throws IOException { nestedBuilder.subAggregation(sumAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -386,7 +386,7 @@ public void testResetRootDocId() throws Exception { bq.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); bq.add(new TermQuery(new Term(IdFieldMapper.NAME, Uid.encodeId("2"))), BooleanClause.Occur.MUST_NOT); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new ConstantScoreQuery(bq.build()), nestedBuilder, fieldType); assertEquals(NESTED_AGG, nested.getName()); @@ -424,7 +424,7 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - Terms terms = search(newSearcher(indexReader, false, true), + Terms terms = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); assertEquals(7, terms.getBuckets().size()); @@ -473,7 +473,8 @@ public void testNestedOrdering() throws IOException { nestedBuilder.subAggregation(maxAgg); termsBuilder.subAggregation(nestedBuilder); - terms = search(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); + terms = searchAndReduce(newSearcher(indexReader, false, true), + new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); assertEquals(7, terms.getBuckets().size()); assertEquals("authors", terms.getName()); @@ -561,7 +562,7 @@ public void testNestedOrdering_random() throws IOException { nestedBuilder.subAggregation(minAgg); 
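// The nested min over each book's chapters becomes the sort key for the enclosing
// terms buckets, so the order asserted below should match the randomly generated
// books re-sorted by that value.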
termsBuilder.subAggregation(nestedBuilder); - Terms terms = search(newSearcher(indexReader, false, true), + Terms terms = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), termsBuilder, fieldType1, fieldType2); assertEquals(books.size(), terms.getBuckets().size()); @@ -658,7 +659,7 @@ public void testPreGetChildLeafCollectors() throws IOException { MappedFieldType fieldType1 = new KeywordFieldMapper.KeywordFieldType("key"); MappedFieldType fieldType2 = new KeywordFieldMapper.KeywordFieldType("value"); - Filter filter = search(newSearcher(indexReader, false, true), + Filter filter = searchAndReduce(newSearcher(indexReader, false, true), Queries.newNonNestedFilter(), filterAggregationBuilder, fieldType1, fieldType2); assertEquals("filterAgg", filter.getName()); @@ -719,9 +720,9 @@ public void testFieldAlias() throws IOException { NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( max(MAX_AGG_NAME).field(VALUE_FIELD_NAME + "-alias")); - InternalNested nested = search(newSearcher(indexReader, false, true), + InternalNested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = search(newSearcher(indexReader, false, true), + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); assertEquals(nested, aliasNested); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java index 17e6c8e313f7f..dbcfa73d895b8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregatorTests.java @@ -98,7 +98,7 @@ public void testNoDocs() throws IOException { reverseNestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - Nested nested = search(newSearcher(indexReader, false, true), + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); ReverseNested reverseNested = (ReverseNested) ((InternalAggregation)nested).getProperty(REVERSE_AGG_NAME); @@ -160,7 +160,7 @@ public void testMaxFromParentDocs() throws IOException { reverseNestedBuilder.subAggregation(maxAgg); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(VALUE_FIELD_NAME, NumberFieldMapper.NumberType.LONG); - Nested nested = search(newSearcher(indexReader, false, true), + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), nestedBuilder, fieldType); assertEquals(expectedNestedDocs, nested.getDocCount()); @@ -224,9 +224,9 @@ public void testFieldAlias() throws IOException { NestedAggregationBuilder aliasAgg = nested(NESTED_AGG, NESTED_OBJECT).subAggregation( reverseNested(REVERSE_AGG_NAME).subAggregation(aliasMaxAgg)); - Nested nested = search(newSearcher(indexReader, false, true), + Nested nested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), agg, fieldType); - Nested aliasNested = search(newSearcher(indexReader, false, true), + Nested aliasNested = searchAndReduce(newSearcher(indexReader, false, true), new MatchAllDocsQuery(), aliasAgg, fieldType); ReverseNested 
                reverseNested = nested.getAggregations().get(REVERSE_AGG_NAME);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java
index f4dd09c6eebd7..b811e150fe407 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/IpRangeAggregatorTests.java
@@ -113,7 +113,7 @@ public void testRanges() throws Exception {
         MappedFieldType fieldType = new IpFieldMapper.IpFieldType("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalBinaryRange range = search(searcher, new MatchAllDocsQuery(), builder, fieldType);
+            InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType);
             assertEquals(numRanges, range.getBuckets().size());
             for (int i = 0; i < range.getBuckets().size(); i++) {
                 Tuple expected = requestedRanges[i];
@@ -148,7 +148,7 @@ public void testMissingUnmapped() throws Exception {
             .missing("192.168.100.42"); // Apparently we expect a string here
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalBinaryRange range = search(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
+            InternalBinaryRange range = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
             assertEquals(1, range.getBuckets().size());
         }
     }
@@ -169,7 +169,7 @@ public void testMissingUnmappedBadType() throws Exception {
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
             expectThrows(IllegalArgumentException.class, () -> {
-                search(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
+                searchAndReduce(searcher, new MatchAllDocsQuery(), builder, (MappedFieldType) null);
             });
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java
index f482342d25449..15ab2cc21a969 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedSamplerTests.java
@@ -192,7 +192,7 @@ private void testCase(IndexSearcher indexSearcher, MappedFieldType genreFieldTyp
             .shardSize(shardSize)
             .subAggregation(new TermsAggregationBuilder("terms").field("id"));

-        InternalSampler result = search(indexSearcher, query, builder, genreFieldType, idFieldType);
+        InternalSampler result = searchAndReduce(indexSearcher, query, builder, genreFieldType, idFieldType);
         verify.accept(result);
     }

@@ -211,7 +211,7 @@ public void testDiversifiedSampler_noDocs() throws Exception {
             .field(genreFieldType.name())
             .subAggregation(new TermsAggregationBuilder("terms").field("id"));

-        InternalSampler result = search(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
+        InternalSampler result = searchAndReduce(indexSearcher, new MatchAllDocsQuery(), builder, genreFieldType, idFieldType);
         Terms terms = result.getAggregations().get("terms");
         assertEquals(0, terms.getBuckets().size());
         indexReader.close();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
index 8621475fa4c59..9f39e0e896d3a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/BinaryTermsAggregatorTests.java
@@ -59,7 +59,7 @@ public class BinaryTermsAggregatorTests extends AggregatorTestCase {
     }

     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING
         );
@@ -68,7 +68,7 @@ public void testMatchNoDocs() throws IOException {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -87,7 +87,7 @@ public void testBadIncludeExclude() throws IOException {

         // Make sure the include/exclude fails regardless of how the user tries to type hint the agg
         AggregationExecutionException e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), null // default, no hint
             ));
@@ -95,7 +95,7 @@
             "they can only be applied to string fields. Use an array of values for include/exclude clauses"));

         e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(BINARY_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), ValueType.STRING // string type hint
             ));
@@ -104,7 +104,7 @@
     }

     public void testBadUserValueTypeHint() throws IOException {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testBothCases(new MatchNoDocsQuery(), dataset,
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(BINARY_FIELD),
             agg -> fail("test should have failed with exception"), ValueType.NUMERIC // numeric type hint
         ));
@@ -114,34 +114,10 @@ public void testBadUserValueTypeHint() throws IOException {
     private void testSearchCase(Query query, List<Long> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<Long> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<Long> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (Long value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new BinaryFieldMapper.CustomBinaryDocValuesField(BINARY_FIELD, Numbers.longToBytes(value)));
                     indexWriter.addDocument(document);
                     document.clear();
@@ -161,12 +137,7 @@ private void executeTestCase(boolean reduced, Query query, List dataset,

                 MappedFieldType binaryFieldType = new BinaryFieldMapper.BinaryFieldType(BINARY_FIELD);

-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, binaryFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, binaryFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, binaryFieldType);
                 verify.accept(rareTerms);
             }
         }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java
index c3a1be37d05ba..4e049c7e3a176 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/KeywordTermsAggregatorTests.java
@@ -56,12 +56,12 @@ public class KeywordTermsAggregatorTests extends AggregatorTestCase {
     }

     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint
         );

-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.STRING // with type hint
         );
@@ -70,7 +70,7 @@ public void testMatchNoDocs() throws IOException {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -82,7 +82,7 @@ public void testMatchAllDocs() throws IOException {
             }, null // without type hint
         );

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -98,34 +98,10 @@ public void testMatchAllDocs() throws IOException {
     private void testSearchCase(Query query, List<String> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<String> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<String> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<String> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (String value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef(value)));
                     indexWriter.addDocument(document);
                     document.clear();
@@ -145,12 +121,7 @@ private void executeTestCase(boolean reduced, Query query, List dataset,

                 MappedFieldType keywordFieldType = new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD);

-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, keywordFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, keywordFieldType);
                 verify.accept(rareTerms);
             }
         }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
index 30e32c7aa84bc..174a8a1aed6e8 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregatorTests.java
@@ -59,12 +59,12 @@ public class NumericTermsAggregatorTests extends AggregatorTestCase {

     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), null // without type hint
         );

-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> assertEquals(0, agg.getBuckets().size()), ValueType.NUMERIC // with type hint
         );
@@ -73,7 +73,7 @@ public void testMatchNoDocs() throws IOException {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -85,7 +85,7 @@ public void testMatchAllDocs() throws IOException {
             }, null //without type hint
         );

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD),
             agg -> {
                 assertEquals(9, agg.getBuckets().size());
@@ -104,7 +104,7 @@ public void testBadIncludeExclude() throws IOException {

         // Numerics don't support any regex include/exclude, so should fail no matter what we do
         AggregationExecutionException e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), null
             ));
@@ -113,7 +113,7 @@ public void testBadIncludeExclude() throws IOException {
             "values for include/exclude clauses used to filter numeric fields"));

         e = expectThrows(AggregationExecutionException.class,
-            () -> testBothCases(new MatchNoDocsQuery(), dataset,
+            () -> testSearchCase(new MatchNoDocsQuery(), dataset,
                 aggregation -> aggregation.field(LONG_FIELD).includeExclude(includeExclude).format("yyyy-MM-dd"),
                 agg -> fail("test should have failed with exception"), ValueType.NUMERIC // with type hint
             ));
@@ -126,34 +126,10 @@ public void testBadIncludeExclude() throws IOException {
     private void testSearchCase(Query query, List<Long> dataset,
                                 Consumer<TermsAggregationBuilder> configure,
                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify, valueType);
-    }
-
-    private void testSearchAndReduceCase(Query query, List<Long> dataset,
-                                         Consumer<TermsAggregationBuilder> configure,
-                                         Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify, valueType);
-    }
-
-    private void testBothCases(Query query, List<Long> dataset,
-                               Consumer<TermsAggregationBuilder> configure,
-                               Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-        testSearchCase(query, dataset, configure, verify, valueType);
-        testSearchAndReduceCase(query, dataset, configure, verify, valueType);
-    }
-
-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<TermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedTerms> verify, ValueType valueType) throws IOException {
-
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 for (Long value : dataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedNumericDocValuesField(LONG_FIELD, value));
                     document.add(new LongPoint(LONG_FIELD, value));
                     indexWriter.addDocument(document);
@@ -175,12 +151,7 @@ private void executeTestCase(boolean reduced, Query query, List dataset,

                 MappedFieldType longFieldType = new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG);

-                InternalMappedTerms rareTerms;
-                if (reduced) {
-                    rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType);
-                } else {
-                    rareTerms = search(indexSearcher, query, aggregationBuilder, longFieldType);
-                }
+                InternalMappedTerms rareTerms = searchAndReduce(indexSearcher, query, aggregationBuilder, longFieldType);
                 verify.accept(rareTerms);
             }
         }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java
index 17a61818e7768..78e8ca6f68147 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java
@@ -99,11 +99,11 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
     }

     public void testMatchNoDocs() throws IOException {
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
-        testBothCases(new MatchNoDocsQuery(), dataset,
+        testSearchCase(new MatchNoDocsQuery(), dataset,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
@@ -112,7 +112,7 @@ public void testMatchNoDocs() throws IOException {
     public void testMatchAllDocs() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -121,7 +121,7 @@ public void testMatchAllDocs() throws IOException {
                 assertThat(bucket.getDocCount(), equalTo(1L));
             }
         );
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -144,7 +144,7 @@ public void testManyDocsOneRare() throws IOException {
         // The one rare term
         d.add(0L);

-        testSearchAndReduceCase(query, d,
+        testSearchCase(query, d,
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -153,7 +153,7 @@ public void testManyDocsOneRare() throws IOException {
                 assertThat(bucket.getDocCount(), equalTo(1L));
             }
         );
-        testSearchAndReduceCase(query, d,
+        testSearchCase(query, d,
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> {
                 assertEquals(1, agg.getBuckets().size());
@@ -167,7 +167,7 @@ public void testManyDocsOneRare() throws IOException {
     public void testIncludeExclude() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(LONG_FIELD)
                 .maxDocCount(2) // bump to 2 since we're only including "2"
                 .includeExclude(new IncludeExclude(new long[]{2}, new long[]{})),
@@ -178,7 +178,7 @@ public void testIncludeExclude() throws IOException {
                 assertThat(bucket.getDocCount(), equalTo(2L));
             }
         );
-        testBothCases(query, dataset,
+        testSearchCase(query, dataset,
             aggregation -> aggregation.field(KEYWORD_FIELD)
                 .maxDocCount(2) // bump to 2 since we're only including "2"
                 .includeExclude(new IncludeExclude(new String[]{"2"}, new String[]{})),
@@ -194,7 +194,7 @@ public void testIncludeExclude() throws IOException {
     public void testEmbeddedMaxAgg() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD);
                 aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(max);
             },
@@ -210,7 +210,7 @@ public void testEmbeddedMaxAgg() throws IOException {
                 assertThat(((Max)(children.asList().get(0))).getValue(), equalTo(1.0));
             }
         );
-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 MaxAggregationBuilder max = new MaxAggregationBuilder("the_max").field(LONG_FIELD);
                 aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(max);
             },
@@ -240,11 +240,11 @@ public void testEmpty() throws IOException {
             agg -> assertEquals(0, agg.getBuckets().size())
         );

-        testSearchAndReduceCase(query, Collections.emptyList(),
+        testSearchCase(query, Collections.emptyList(),
             aggregation -> aggregation.field(LONG_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
        );
-        testSearchAndReduceCase(query, Collections.emptyList(),
+        testSearchCase(query, Collections.emptyList(),
             aggregation -> aggregation.field(KEYWORD_FIELD).maxDocCount(1),
             agg -> assertEquals(0, agg.getBuckets().size())
         );
@@ -312,7 +312,7 @@ public void testRangeField() throws Exception {
     public void testNestedTerms() throws IOException {
         Query query = new MatchAllDocsQuery();

-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms")
                     .field(KEYWORD_FIELD);
                 aggregation.field(LONG_FIELD).maxDocCount(1).subAggregation(terms);
@@ -331,7 +331,7 @@ public void testNestedTerms() throws IOException {
             }
         );

-        testBothCases(query, dataset, aggregation -> {
+        testSearchCase(query, dataset, aggregation -> {
                 TermsAggregationBuilder terms = new TermsAggregationBuilder("the_terms")
                     .field(KEYWORD_FIELD);
                 aggregation.field(KEYWORD_FIELD).maxDocCount(1).subAggregation(terms);
@@ -352,22 +352,20 @@ public void testNestedTerms() throws IOException {
     }

     public void testInsideTerms() throws IOException {
-        for (boolean reduce : new boolean[] {false, true}) {
-            for (String field : new String[] {KEYWORD_FIELD, LONG_FIELD}) {
-                AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd").subAggregation(
-                    new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2));
-                StringTerms terms = (StringTerms) executeTestCase(reduce, new MatchAllDocsQuery(), dataset, builder);
-
-                StringTerms.Bucket even = terms.getBucketByKey("even");
-                InternalRareTerms evenRare = even.getAggregations().get("rare");
-                assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("2"));
-                assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(2L));
-
-                StringTerms.Bucket odd = terms.getBucketByKey("odd");
-                InternalRareTerms oddRare = odd.getAggregations().get("rare");
-                assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("1"));
-                assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(1L));
-            }
+        for (String field : new String[] {KEYWORD_FIELD, LONG_FIELD}) {
+            AggregationBuilder builder = new TermsAggregationBuilder("terms").field("even_odd").subAggregation(
+                new RareTermsAggregationBuilder("rare").field(field).maxDocCount(2));
+            StringTerms terms = executeTestCase(new MatchAllDocsQuery(), dataset, builder);
+
+            StringTerms.Bucket even = terms.getBucketByKey("even");
+            InternalRareTerms evenRare = even.getAggregations().get("rare");
+            assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("2"));
+            assertEquals(evenRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(2L));
+
+            StringTerms.Bucket odd = terms.getBucketByKey("odd");
+            InternalRareTerms oddRare = odd.getAggregations().get("rare");
+            assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getKeyAsString).collect(toList()), List.of("1"));
+            assertEquals(oddRare.getBuckets().stream().map(InternalRareTerms.Bucket::getDocCount).collect(toList()), List.of(1L));
         }
     }
@@ -530,34 +528,6 @@ private List generateDocsWithNested(String id, int value, int[] nested
         return documents;
     }

-    private void testSearchCase(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        executeTestCase(false, query, dataset, configure, verify);
-    }
-
-    private void testSearchAndReduceCase(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        executeTestCase(true, query, dataset, configure, verify);
-    }
-
-    private void testBothCases(
-        Query query,
-        List<Long> dataset,
-        Consumer<RareTermsAggregationBuilder> configure,
-        Consumer<InternalMappedRareTerms<?, ?>> verify
-    ) throws IOException {
-        testSearchCase(query, dataset, configure, verify);
-        testSearchAndReduceCase(query, dataset, configure, verify);
-    }
-
     @Override
     protected IndexSettings createIndexSettings() {
         Settings nodeSettings = Settings.builder()
@@ -572,38 +542,27 @@ protected IndexSettings createIndexSettings() {
         );
     }

-    private void executeTestCase(boolean reduced, Query query, List<Long> dataset,
-                                 Consumer<RareTermsAggregationBuilder> configure,
-                                 Consumer<InternalMappedRareTerms<?, ?>> verify) throws IOException {
+    private void testSearchCase(Query query,
+                                List<Long> dataset,
+                                Consumer<RareTermsAggregationBuilder> configure,
+                                Consumer<InternalMappedRareTerms<?, ?>> verify) throws IOException {
         RareTermsAggregationBuilder aggregationBuilder = new RareTermsAggregationBuilder("_name");
         if (configure != null) {
             configure.accept(aggregationBuilder);
         }
-        InternalMappedRareTerms result = (InternalMappedRareTerms) executeTestCase(
-            reduced,
-            query,
-            dataset,
-            aggregationBuilder
-        );
-        verify.accept(result);
+        verify.accept(executeTestCase(query, dataset, aggregationBuilder));
     }

-    private InternalAggregation executeTestCase(
-        boolean reduced,
-        Query query,
-        List<Long> dataset,
-        AggregationBuilder aggregationBuilder
-    ) throws IOException {
+    private <A extends InternalAggregation> A executeTestCase(Query query,
+                                                              List<Long> dataset,
+                                                              AggregationBuilder aggregationBuilder) throws IOException {
         try (Directory directory = newDirectory()) {
             try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
                 Document document = new Document();
                 List<Long> shuffledDataset = new ArrayList<>(dataset);
                 Collections.shuffle(shuffledDataset, random());
                 for (Long value : shuffledDataset) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
-
                     document.add(new SortedNumericDocValuesField(LONG_FIELD, value));
                     document.add(new LongPoint(LONG_FIELD, value));
                     document.add(new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef(Long.toString(value))));
@@ -620,10 +579,7 @@ private InternalAggregation executeTestCase(
                     keywordField(KEYWORD_FIELD),
                     longField(LONG_FIELD),
                     keywordField("even_odd")};
-                if (reduced) {
-                    return searchAndReduce(indexSearcher, query, aggregationBuilder, types);
-                }
-                return search(indexSearcher, query, aggregationBuilder, types);
+                return searchAndReduce(indexSearcher, query, aggregationBuilder, types);
             }
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
index 59bdda66dea41..e992eab92bf04 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/SignificantTermsAggregatorTests.java
@@ -115,7 +115,7 @@ public void testSignificance() throws IOException {
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);

             SignificantTermsAggregationBuilder sigAgg = new SignificantTermsAggregationBuilder("sig_text").field("text");
             sigAgg.executionHint(randomExecutionHint());
@@ -259,7 +259,7 @@ public void testUnmapped() throws IOException {
         indexWriterConfig.setMaxBufferedDocs(100);
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);

             // Attempt aggregation on unmapped field
             SignificantTermsAggregationBuilder sigAgg = new SignificantTermsAggregationBuilder("sig_text").field("unmapped_field");
@@ -328,7 +328,7 @@ public void testFieldAlias() throws IOException {
         indexWriterConfig.setRAMBufferSizeMB(100); // flush on open to have a single segment
         try (Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, indexWriterConfig)) {
-            addMixedTextDocs(textFieldType, w);
+            addMixedTextDocs(w);

             SignificantTermsAggregationBuilder agg = significantTerms("sig_text").field("text");
             SignificantTermsAggregationBuilder aliasAgg = significantTerms("sig_text").field("text-alias");
@@ -389,7 +389,7 @@ private void testAllDocsWithoutStringField(String executionHint) throws IOExcept
             IndexSearcher searcher = newIndexSearcher(reader);
             SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f")
                 .executionHint(executionHint);
-            SignificantStringTerms result = search(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
+            SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
             assertThat(result.getSubsetSize(), equalTo(1L));
         }
     }
@@ -409,7 +409,7 @@ public void testAllDocsWithoutNumericField() throws IOException {
         try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
             IndexSearcher searcher = newIndexSearcher(reader);
             SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f");
-            SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request, longField("f"));
+            SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, longField("f"));
             assertThat(result.getSubsetSize(), equalTo(1L));
         }
     }
@@ -441,7 +441,7 @@ private void testSomeDocsWithoutStringField(String executionHint) throws IOExcep
             IndexSearcher searcher = newIndexSearcher(reader);
             SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f")
                 .executionHint(executionHint);
-            SignificantStringTerms result = search(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
+            SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, keywordField("f"));
             assertThat(result.getSubsetSize(), equalTo(2L));
         }
     }
@@ -463,7 +463,7 @@ public void testSomeDocsWithoutNumericField() throws IOException {
         try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
             IndexSearcher searcher = newIndexSearcher(reader);
             SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("f").field("f");
-            SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request, longField("f"));
+            SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request, longField("f"));
             assertThat(result.getSubsetSize(), equalTo(2L));
         }
     }
@@ -495,14 +495,17 @@ private void threeLayerStringTestCase(String executionHint) throws IOException {
         try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
             IndexSearcher searcher = newIndexSearcher(reader);
             SignificantTermsAggregationBuilder kRequest = new SignificantTermsAggregationBuilder("k").field("k")
+                .minDocCount(0)
                 .executionHint(executionHint);
             SignificantTermsAggregationBuilder jRequest = new SignificantTermsAggregationBuilder("j").field("j")
+                .minDocCount(0)
                 .executionHint(executionHint)
                 .subAggregation(kRequest);
             SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i")
+                .minDocCount(0)
                 .executionHint(executionHint)
                 .subAggregation(jRequest);
-            SignificantStringTerms result = search(
+            SignificantStringTerms result = searchAndReduce(
                 searcher,
                 new MatchAllDocsQuery(),
                 request,
@@ -549,10 +552,10 @@ public void testThreeLayerLong() throws IOException {
             }
             try (IndexReader reader = maybeWrapReaderEs(writer.getReader())) {
                 IndexSearcher searcher = newIndexSearcher(reader);
-                SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i")
-                    .subAggregation(new SignificantTermsAggregationBuilder("j").field("j")
-                        .subAggregation(new SignificantTermsAggregationBuilder("k").field("k")));
-                SignificantLongTerms result = search(searcher, new MatchAllDocsQuery(), request,
+                SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i").minDocCount(0)
+                    .subAggregation(new SignificantTermsAggregationBuilder("j").field("j").minDocCount(0)
+                        .subAggregation(new SignificantTermsAggregationBuilder("k").field("k").minDocCount(0)));
+                SignificantLongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                     longField("i"), longField("j"), longField("k"));
                 assertThat(result.getSubsetSize(), equalTo(1000L));
                 for (int i = 0; i < 10; i++) {
@@ -576,7 +579,7 @@ public void testThreeLayerLong() throws IOException {
         }
    }

-    private void addMixedTextDocs(TextFieldType textFieldType, IndexWriter w) throws IOException {
+    private void addMixedTextDocs(IndexWriter w) throws IOException {
         for (int i = 0; i < 10; i++) {
             Document doc = new Document();
             StringBuilder text = new StringBuilder("common ");
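[Editor's note: a minimal sketch, not part of the patch. Once test results go through the real reduce phase via searchAndReduce(), significant-terms buckets with low doc counts can be pruned during the final reduce, which appears to be why the three-layer tests above now pin minDocCount(0) on every level. The pattern, assuming the AggregatorTestCase helpers (searchAndReduce, keywordField) shown in this patch are in scope:

    // Sketch only: relax the minimum doc count so every bucket survives the
    // final reduce and the nested assertions can still see it.
    SignificantTermsAggregationBuilder request = new SignificantTermsAggregationBuilder("i").field("i")
        .minDocCount(0) // otherwise low-frequency buckets are pruned during reduction
        .subAggregation(new SignificantTermsAggregationBuilder("j").field("j").minDocCount(0));
    SignificantStringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
        keywordField("i"), keywordField("j"));
]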
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
index ad2304c58d9f5..45a7db50cc0f5 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
@@ -1206,7 +1206,7 @@ public void testWithNestedAggregations() throws IOException {
             = new NumberFieldMapper.NumberFieldType("nested_value", NumberFieldMapper.NumberType.LONG);
         try (IndexReader indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) {
             {
-                InternalNested result = search(newSearcher(indexReader, false, true),
+                InternalNested result = searchAndReduce(newSearcher(indexReader, false, true),
                     // match root document only
                     new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), nested, fieldType);
                 InternalMultiBucketAggregation terms = result.getAggregations().get("terms");
@@ -1216,7 +1216,7 @@ public void testWithNestedAggregations() throws IOException {
             {
                 FilterAggregationBuilder filter = new FilterAggregationBuilder("filter", new MatchAllQueryBuilder())
                     .subAggregation(nested);
-                InternalFilter result = search(newSearcher(indexReader, false, true),
+                InternalFilter result = searchAndReduce(newSearcher(indexReader, false, true),
                     // match root document only
                     new DocValuesFieldExistsQuery(PRIMARY_TERM_NAME), filter, fieldType);
                 InternalNested nestedResult = result.getAggregations().get("nested");
@@ -1276,7 +1276,7 @@ private void threeLayerStringTestCase(String executionHint) throws IOException {
             TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i").executionHint(executionHint)
                 .subAggregation(new TermsAggregationBuilder("j").field("j").executionHint(executionHint)
                     .subAggregation(new TermsAggregationBuilder("k").field("k").executionHint(executionHint)));
-            StringTerms result = search(searcher, new MatchAllDocsQuery(), request,
+            StringTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                 keywordField("i"), keywordField("j"), keywordField("k"));
             for (int i = 0; i < 10; i++) {
                 StringTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i));
@@ -1316,7 +1316,7 @@ public void testThreeLayerLong() throws IOException {
             TermsAggregationBuilder request = new TermsAggregationBuilder("i").field("i")
                 .subAggregation(new TermsAggregationBuilder("j").field("j")
                     .subAggregation(new TermsAggregationBuilder("k").field("k")));
-            LongTerms result = search(searcher, new MatchAllDocsQuery(), request,
+            LongTerms result = searchAndReduce(searcher, new MatchAllDocsQuery(), request,
                 longField("i"), longField("j"), longField("k"));
             for (int i = 0; i < 10; i++) {
                 LongTerms.Bucket iBucket = result.getBucketByKey(Integer.toString(i));
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
index cdcb17cbbfc92..b106835a85bea 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregatorTests.java
@@ -266,7 +266,7 @@ public void testCase(MappedFieldType ft,
             ExtendedStatsAggregationBuilder aggBuilder = new ExtendedStatsAggregationBuilder("my_agg")
                 .field("field")
                 .sigma(randomDoubleBetween(0, 10, true));
-            InternalExtendedStats stats = search(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
+            InternalExtendedStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, ft);
             verify.accept(stats);
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
index ca1cb92e0fd5c..f9a23d703097f 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregatorTests.java
@@ -57,7 +57,7 @@ public void testEmpty() throws Exception {
         MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertTrue(Double.isInfinite(bounds.top));
             assertTrue(Double.isInfinite(bounds.bottom));
             assertTrue(Double.isInfinite(bounds.posLeft));
@@ -84,7 +84,7 @@ public void testUnmappedFieldWithDocs() throws Exception {
         MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertTrue(Double.isInfinite(bounds.top));
             assertTrue(Double.isInfinite(bounds.bottom));
             assertTrue(Double.isInfinite(bounds.posLeft));
@@ -117,7 +117,7 @@ public void testMissing() throws Exception {

         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertThat(bounds.top, equalTo(lat));
             assertThat(bounds.bottom, equalTo(lat));
             assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY));
@@ -144,7 +144,7 @@ public void testInvalidMissing() throws Exception {
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
             ElasticsearchParseException exception = expectThrows(ElasticsearchParseException.class,
-                () -> search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
+                () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType));
             assertThat(exception.getMessage(), startsWith("unsupported symbol"));
         }
     }
@@ -194,7 +194,7 @@ public void testRandom() throws Exception {
         MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE));
             assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE));
             assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE));
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
index ab4a1fdd4a982..847c85fc1708e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregatorTests.java
@@ -51,7 +51,7 @@ public void testEmpty() throws Exception {
         MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertNull(result.centroid());
             assertFalse(AggregationInspectionHelper.hasValue(result));
         }
@@ -71,11 +71,11 @@ public void testUnmapped() throws Exception {
             IndexSearcher searcher = new IndexSearcher(reader);

             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertNull(result.centroid());

             fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-            result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertNull(result.centroid());
             assertFalse(AggregationInspectionHelper.hasValue(result));
         }
@@ -97,7 +97,7 @@ public void testUnmappedWithMissing() throws Exception {
             IndexSearcher searcher = new IndexSearcher(reader);

             MappedFieldType fieldType = new GeoPointFieldMapper.GeoPointFieldType("another_field");
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             assertEquals(result.centroid(), expectedCentroid);
             assertTrue(AggregationInspectionHelper.hasValue(result));
         }
@@ -161,7 +161,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro
             .field("field");
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);

             assertEquals("my_agg", result.getName());
             GeoPoint centroid = result.centroid();
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
index 1c16c8b4d24ff..d2cd0be2ed3d8 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksAggregatorTests.java
@@ -66,7 +66,7 @@ public void testEmpty() throws IOException {
             = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = new MultiReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Percentile rank = ranks.iterator().next();
             assertEquals(Double.NaN, rank.getPercent(), 0d);
             assertEquals(0.5, rank.getValue(), 0d);
@@ -90,7 +90,7 @@ public void testSimple() throws IOException {
             = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Iterator rankIterator = ranks.iterator();
             Percentile rank = rankIterator.next();
             assertEquals(0.1, rank.getValue(), 0d);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
index d03e64e1b120d..8d8b3da169249 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
@@ -138,8 +138,11 @@ public static void initMockScripts() {
             return state;
         });
         SCRIPTS.put("reduceScript", params -> {
-            List<Integer> states = (List<Integer>) params.get("states");
-            return states.stream().mapToInt(Integer::intValue).sum();
+            List<Object> states = (List<Object>) params.get("states");
+            return states.stream()
+                .filter(a -> a instanceof Number)
+                .map(a -> (Number) a)
+                .mapToInt(Number::intValue).sum();
         });

         SCRIPTS.put("initScriptScore", params -> {
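[Editor's note: a standalone sketch of the idea behind the reduceScript change above, not part of the patch. Once every test result is funneled through the reduce phase, the mock reduce script can be handed states produced by partial reduces rather than raw per-shard Integers, which appears to be why the blind cast gives way to a defensive sum. The params map and its "states" entry mirror the mock-script plumbing shown above:

    // Sketch: sum only the entries that are actually numbers; after a partial
    // reduce the states list may carry nulls or non-Integer values.
    List<Object> states = (List<Object>) params.get("states");
    int total = states.stream()
        .filter(s -> s instanceof Number)
        .mapToInt(s -> ((Number) s).intValue())
        .sum();
]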
@@ -262,10 +265,11 @@ public void testNoDocs() throws IOException {
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
-                assertEquals(0, ((HashMap) scriptedMetric.aggregation()).size());
+                assertEquals(0, scriptedMetric.aggregation());
             }
         }
     }
@@ -282,7 +286,7 @@ public void testScriptedMetricWithoutCombine() throws IOException {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT);
                 IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                    () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
                 assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]");
             }
         }
@@ -300,7 +304,7 @@ public void testScriptedMetricWithoutReduce() throws IOException {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
                 IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
-                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                    () -> searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
                 assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]");
             }
         }
@@ -321,7 +325,8 @@ public void testScriptedMetricWithCombine() throws IOException {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
                     .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
                 assertEquals(numDocs, scriptedMetric.aggregation());
@@ -344,11 +349,12 @@ public void testScriptedMetricWithCombineAccessesScores() throws IOException {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE)
                     .combineScript(COMBINE_SCRIPT_SCORE).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
                 assertEquals(AGG_NAME, scriptedMetric.getName());
                 assertNotNull(scriptedMetric.aggregation());
                 // all documents have score of 1.0
-                assertEquals((double) numDocs, scriptedMetric.aggregation());
+                assertEquals(numDocs, scriptedMetric.aggregation());
             }
         }
     }
@@ -359,13 +365,16 @@ public void testScriptParamsPassedThrough() throws IOException {
                 for (int i = 0; i < 100; i++) {
                     indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
                 }
+                // force a single aggregator
+                indexWriter.forceMerge(1);
             }

             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
                 aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
+                ScriptedMetric scriptedMetric =
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);

                 // The result value depends on the script params.
                 assertEquals(4896, scriptedMetric.aggregation());
@@ -414,7 +423,7 @@ public void testConflictingAggAndScriptParams() throws IOException {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);

                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Parameter name \"" + CONFLICTING_PARAM_NAME + "\" used in both aggregation and script parameters",
                     ex.getMessage());
@@ -433,7 +442,7 @@ public void testSelfReferencingAggStateAfterInit() throws IOException {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);

                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs init script)", ex.getMessage());
             }
@@ -454,7 +463,7 @@ public void testSelfReferencingAggStateAfterMap() throws IOException {
                     .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);

                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs map script)", ex.getMessage());
             }
@@ -472,7 +481,7 @@ public void testSelfReferencingAggStateAfterCombine() throws IOException {
                     .combineScript(COMBINE_SCRIPT_SELF_REF).reduceScript(REDUCE_SCRIPT);

                 IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
-                    search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
+                    searchAndReduce(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
                 );
                 assertEquals("Iterable object is self-referencing itself (Scripted metric aggs combine script)", ex.getMessage());
             }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
index 5e11dbf867bad..bb3902ad14d1e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsAggregatorTests.java
@@ -234,7 +234,7 @@ public void testPartiallyUnmapped() throws IOException {
              MultiReader multiReader = new MultiReader(mappedReader, unmappedReader)) {

             final IndexSearcher searcher = new IndexSearcher(multiReader);
-            final InternalStats stats = search(searcher, new MatchAllDocsQuery(), builder, ft);
+            final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, ft);

             assertEquals(expected.count, stats.getCount(), 0);
             assertEquals(expected.sum, stats.getSum(), TOLERANCE);
@@ -433,7 +433,7 @@ private void testCase(StatsAggregationBuilder builder,
         try (IndexReader reader = indexWriter.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
             final MappedFieldType[] fieldTypesArray = fieldTypes.toArray(new MappedFieldType[0]);
-            final InternalStats stats = search(searcher, new MatchAllDocsQuery(), builder, fieldTypesArray);
+            final InternalStats stats = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldTypesArray);
             verify.accept(stats);
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java
index ca1aebb6be135..1bf2a74c1b0e5 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/SumAggregatorTests.java
@@ -256,7 +256,7 @@ public void testPartiallyUnmapped() throws IOException {

             final IndexSearcher searcher = newSearcher(multiReader, true, true);

-            final InternalSum internalSum = search(searcher, new MatchAllDocsQuery(), builder, fieldType);
+            final InternalSum internalSum = searchAndReduce(searcher, new MatchAllDocsQuery(), builder, fieldType);
             assertEquals(sum, internalSum.getValue(), 0d);
             assertTrue(AggregationInspectionHelper.hasValue(internalSum));
         }
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java
index 52c2bf92373ab..3d2acaff2840a 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksAggregatorTests.java
@@ -65,7 +65,7 @@ public void testEmpty() throws IOException {
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = new MultiReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Percentile rank = ranks.iterator().next();
             assertEquals(Double.NaN, rank.getPercent(), 0d);
             assertEquals(0.5, rank.getValue(), 0d);
@@ -88,7 +88,7 @@ public void testSimple() throws IOException {
         MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.DOUBLE);
         try (IndexReader reader = w.getReader()) {
             IndexSearcher searcher = new IndexSearcher(reader);
-            PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+            PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
             Iterator rankIterator = ranks.iterator();
             Percentile rank = rankIterator.next();
             assertEquals(0.1, rank.getValue(), 0d);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java
index 632ba6f6f48d9..eb7d87c3b0241 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/DerivativeAggregatorTests.java
@@ -354,9 +354,6 @@ public void testDocCountDerivativeWithGaps() throws IOException {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -408,10 +405,6 @@ public void testDocCountDerivativeWithGaps_random() throws IOException {
                     if (randomBoolean())
                         valueCounts_empty_rnd[i] = 0L;
                     for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) {
-
-                        if (frequently()) {
-                            indexWriter.commit();
-                        }
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
                         document.clear();
@@ -458,9 +451,6 @@ public void testDocCountDerivativeWithGaps_insertZeros() throws IOException {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -514,9 +504,6 @@ public void testSingleValueAggDerivativeWithGaps() throws Exception {
             indexWriter -> {
                 Document document = new Document();
                 for (int i = 0; i < valueCounts_empty.length; i++) {
-                    if (frequently()) {
-                        indexWriter.commit();
-                    }
                     for (int docs = 0; docs < valueCounts_empty[i]; docs++) {
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
@@ -634,10 +621,6 @@ public void testSingleValueAggDerivativeWithGaps_random() throws IOException {
                     if (randomBoolean())
                         valueCounts_empty_rnd[i] = 0L;
                     for (int docs = 0; docs < valueCounts_empty_rnd[i]; docs++) {
-
-                        if (frequently()) {
-                            indexWriter.commit();
-                        }
                         document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i));
                         indexWriter.addDocument(document);
                         document.clear();
@@ -734,9 +717,6 @@ private void executeTestCase(Query query, AggregationBuilder aggBuilder, Consume
         executeTestCase(query, aggBuilder, verify, indexWriter -> {
             Document document = new Document();
             for (int i = 0; i < numValueBuckets; i++) {
-                if (frequently()) {
-                    indexWriter.commit();
-                }
                 for (int docs = 0; docs < valueCounts[i]; docs++) {
                     document.add(new NumericDocValuesField(SINGLE_VALUED_FIELD_NAME, i * interval));
                     indexWriter.addDocument(document);
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java
index 9d07a3deffe0c..2f9f37b1d9999 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/MovFnAggrgatorTests.java
@@ -131,10 +131,6 @@ private void executeTestCase(Query query,
             Document document = new Document();
             int counter = 0;
             for (String date : datasetTimes) {
-                if (frequently()) {
-                    indexWriter.commit();
-                }
-
                 long instant = asLong(date);
                 document.add(new SortedNumericDocValuesField(DATE_FIELD, instant));
                 document.add(new LongPoint(INSTANT_FIELD, instant));
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java
index b156facf4b4f4..433d208e29488 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceRegistryTests.java
@@ -59,9 +59,12 @@ public void testAggregatorNotFoundException() {
             null,
             CoreValuesSourceType.BYTES
         );
-        ValuesSourceRegistry registry = new ValuesSourceRegistry(Map.of("bogus", List.of()), null);
+        ValuesSourceRegistry registry = new ValuesSourceRegistry(
+            Map.of("bogus", List.of()),
+            Map.of(new ValuesSourceRegistry.RegistryKey<>("bogus", ValuesSourceRegistry.CompositeSupplier.class), List.of()),
+            null
+        );
         expectThrows(IllegalArgumentException.class, () -> registry.getAggregator(fieldOnly, "bogus"));
         expectThrows(IllegalArgumentException.class, () -> registry.getAggregator(scriptOnly, "bogus"));
     }
-
 }
diff --git a/server/src/test/java/org/elasticsearch/transport/CompressibleBytesOutputStreamTests.java b/server/src/test/java/org/elasticsearch/transport/CompressibleBytesOutputStreamTests.java
index aeb92dac73479..fd3112845063a 100644
--- a/server/src/test/java/org/elasticsearch/transport/CompressibleBytesOutputStreamTests.java
+++ b/server/src/test/java/org/elasticsearch/transport/CompressibleBytesOutputStreamTests.java
@@ -114,8 +114,10 @@ private static class ZeroOutOnCloseStream extends BytesStreamOutput {

         @Override
         public void close() {
-            int size = (int) bytes.size();
-            bytes.set(0, new byte[size], 0, size);
+            if (bytes != null) {
+                int size = (int) bytes.size();
+                bytes.set(0, new byte[size], 0, size);
+            }
         }
     }
 }
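[Editor's note: a recap sketch, not part of the patch. The guard added to ZeroOutOnCloseStream above covers the case where close() runs before anything was written, so the backing buffer may not exist yet. Under that assumption the guarded close looks like:

    // Sketch: zero out the buffer only if one was ever allocated; close() may
    // run before the first write, leaving the backing ByteArray null.
    @Override
    public void close() {
        if (bytes != null) {
            int size = (int) bytes.size();
            bytes.set(0, new byte[size], 0, size); // overwrite contents with zeros
        }
    }
]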
fieldTypes) throws IOException { - return search(createIndexSettings(), searcher, query, builder, maxBucket, fieldTypes); - } - - protected A search(IndexSettings indexSettings, - IndexSearcher searcher, - Query query, - AggregationBuilder builder, - int maxBucket, - MappedFieldType... fieldTypes) throws IOException { - MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); - C a = createAggregator(query, builder, searcher, indexSettings, bucketConsumer, fieldTypes); - a.preCollection(); - searcher.search(query, a); - a.postCollection(); - @SuppressWarnings("unchecked") - A result = (A) a.buildTopLevel(); - return result; - } - protected A searchAndReduce(IndexSearcher searcher, Query query, AggregationBuilder builder, @@ -471,74 +432,70 @@ protected A searchAndReduc int maxBucket, MappedFieldType... fieldTypes) throws IOException { final IndexReaderContext ctx = searcher.getTopReaderContext(); - - final ShardSearcher[] subSearchers; - if (ctx instanceof LeafReaderContext) { - subSearchers = new ShardSearcher[1]; - subSearchers[0] = new ShardSearcher((LeafReaderContext) ctx, ctx); - } else { - final CompositeReaderContext compCTX = (CompositeReaderContext) ctx; - final int size = compCTX.leaves().size(); - subSearchers = new ShardSearcher[size]; - for(int searcherIDX=0;searcherIDX aggs = new ArrayList<>(); Query rewritten = searcher.rewrite(query); - Weight weight = searcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); MultiBucketConsumer bucketConsumer = new MultiBucketConsumer(maxBucket, new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); C root = createAggregator(query, builder, searcher, bucketConsumer, fieldTypes); - for (ShardSearcher subSearcher : subSearchers) { - MultiBucketConsumer shardBucketConsumer = new MultiBucketConsumer(maxBucket, - new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); - C a = createAggregator(query, builder, subSearcher, indexSettings, shardBucketConsumer, fieldTypes); - a.preCollection(); - subSearcher.search(weight, a); - a.postCollection(); - InternalAggregation agg = a.buildTopLevel(); - aggs.add(agg); - } - if (aggs.isEmpty()) { - return (A) root.buildEmptyAggregation(); - } else { - if (randomBoolean() && aggs.size() > 1) { - // sometimes do an incremental reduce - int toReduceSize = aggs.size(); - Collections.shuffle(aggs, random()); - int r = randomIntBetween(1, toReduceSize); - List toReduce = aggs.subList(0, r); - InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction( - root.context().bigArrays(), getMockScriptService(), () -> PipelineAggregator.PipelineTree.EMPTY); - A reduced = (A) aggs.get(0).reduce(toReduce, context); - aggs = new ArrayList<>(aggs.subList(r, toReduceSize)); - aggs.add(reduced); + if (randomBoolean() && searcher.getIndexReader().leaves().size() > 0) { + assertThat(ctx, instanceOf(CompositeReaderContext.class)); + final CompositeReaderContext compCTX = (CompositeReaderContext) ctx; + final int size = compCTX.leaves().size(); + final ShardSearcher[] subSearchers = new ShardSearcher[size]; + for (int searcherIDX = 0; searcherIDX < subSearchers.length; searcherIDX++) { + final LeafReaderContext leave = compCTX.leaves().get(searcherIDX); + subSearchers[searcherIDX] = new ShardSearcher(leave, compCTX); } - // now do the final reduce - MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket, - new 
NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); - InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( - root.context().bigArrays(), getMockScriptService(), reduceBucketConsumer, pipelines); + for (ShardSearcher subSearcher : subSearchers) { + MultiBucketConsumer shardBucketConsumer = new MultiBucketConsumer(maxBucket, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + C a = createAggregator(query, builder, subSearcher, indexSettings, shardBucketConsumer, fieldTypes); + a.preCollection(); + Weight weight = subSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); + subSearcher.search(weight, a); + a.postCollection(); + aggs.add(a.buildTopLevel()); + } + } else { + root.preCollection(); + searcher.search(rewritten, root); + root.postCollection(); + aggs.add(root.buildTopLevel()); + } + + if (randomBoolean() && aggs.size() > 1) { + // sometimes do an incremental reduce + int toReduceSize = aggs.size(); + Collections.shuffle(aggs, random()); + int r = randomIntBetween(1, toReduceSize); + List toReduce = aggs.subList(0, r); + InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forPartialReduction( + root.context().bigArrays(), getMockScriptService(), () -> PipelineAggregator.PipelineTree.EMPTY); + A reduced = (A) aggs.get(0).reduce(toReduce, context); + aggs = new ArrayList<>(aggs.subList(r, toReduceSize)); + aggs.add(reduced); + } + + // now do the final reduce + MultiBucketConsumer reduceBucketConsumer = new MultiBucketConsumer(maxBucket, + new NoneCircuitBreakerService().getBreaker(CircuitBreaker.REQUEST)); + InternalAggregation.ReduceContext context = InternalAggregation.ReduceContext.forFinalReduction( + root.context().bigArrays(), getMockScriptService(), reduceBucketConsumer, pipelines); - @SuppressWarnings("unchecked") - A internalAgg = (A) aggs.get(0).reduce(aggs, context); + @SuppressWarnings("unchecked") + A internalAgg = (A) aggs.get(0).reduce(aggs, context); - // materialize any parent pipelines - internalAgg = (A) internalAgg.reducePipelines(internalAgg, context, pipelines); + // materialize any parent pipelines + internalAgg = (A) internalAgg.reducePipelines(internalAgg, context, pipelines); - // materialize any sibling pipelines at top level - for (PipelineAggregator pipelineAggregator : pipelines.aggregators()) { - internalAgg = (A) pipelineAggregator.reduce(internalAgg, context); - } - doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer); - return internalAgg; + // materialize any sibling pipelines at top level + for (PipelineAggregator pipelineAggregator : pipelines.aggregators()) { + internalAgg = (A) pipelineAggregator.reduce(internalAgg, context); } - + doAssertReducedMultiBucketConsumer(internalAgg, reduceBucketConsumer); + return internalAgg; } protected void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) { diff --git a/x-pack/docs/en/security/authorization/alias-privileges.asciidoc b/x-pack/docs/en/security/authorization/alias-privileges.asciidoc index f1163aa2224ff..989171d84ed9d 100644 --- a/x-pack/docs/en/security/authorization/alias-privileges.asciidoc +++ b/x-pack/docs/en/security/authorization/alias-privileges.asciidoc @@ -17,37 +17,38 @@ to control access to a data stream. Any role or user granted privileges to a data stream are automatically granted the same privileges to its backing indices. 
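+
+A role carrying such a grant can also be created programmatically. The
+following is a minimal sketch using the Java high-level REST client; the
+role name `data-stream-reader` is illustrative, and the security classes are
+assumed to come from the client's `org.elasticsearch.client.security`
+packages:
+
+[source,java]
+----
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.security.PutRoleRequest;
+import org.elasticsearch.client.security.PutRoleResponse;
+import org.elasticsearch.client.security.RefreshPolicy;
+import org.elasticsearch.client.security.user.privileges.IndicesPrivileges;
+import org.elasticsearch.client.security.user.privileges.Role;
+
+public class DataStreamRoleExample {
+    // Grants read on the data stream itself; the backing .ds-* indices
+    // inherit the privilege automatically, as described above.
+    static void grantReadOnDataStream(RestHighLevelClient client) throws Exception {
+        Role role = Role.builder()
+            .name("data-stream-reader") // illustrative role name
+            .indicesPrivileges(IndicesPrivileges.builder()
+                .indices("my-data-stream") // the stream, not its backing indices
+                .privileges("read")
+                .allowRestrictedIndices(false)
+                .build())
+            .build();
+        PutRoleResponse response = client.security()
+            .putRole(new PutRoleRequest(role, RefreshPolicy.IMMEDIATE), RequestOptions.DEFAULT);
+        // isCreated() is true for a new role, false if an existing role was updated
+        System.out.println("created: " + response.isCreated());
+    }
+}
+----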
-`logs` is a data stream that consists of two backing indices: `.ds-logs-000001` -and `.ds-logs-000002`. +For example, `my-data-stream` consists of two backing indices: +`.ds-my-data-stream-000001` and `.ds-my-data-stream-000002`. -A user is granted the `read` privilege to the `logs` data stream. +A user is granted the `read` privilege to `my-data-stream`. [source,js] -------------------------------------------------- { - "names" : [ "logs" ], + "names" : [ "my-data-stream" ], "privileges" : [ "read" ] } -------------------------------------------------- // NOTCONSOLE Because the user is automatically granted the same privileges to the stream's -backing indices, the user can retrieve a document directly from `.ds-logs-000002`: +backing indices, the user can retrieve a document directly from +`.ds-my-data-stream-000002`: //// [source,console] ---- -PUT /_index_template/logs_data_stream +PUT /_index_template/my-data-stream-template { - "index_patterns": [ "logs*" ], + "index_patterns": [ "my-data-stream*" ], "data_stream": { } } -PUT /_data_stream/logs +PUT /_data_stream/my-data-stream -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ -PUT /logs/_create/2?refresh=wait_for +PUT /my-data-stream/_create/2?refresh=wait_for { "@timestamp": "2020-12-07T11:06:07.000Z" } @@ -56,21 +57,21 @@ PUT /logs/_create/2?refresh=wait_for [source,console] ---- -GET /.ds-logs-000002/_doc/2 +GET /.ds-my-data-stream-000002/_doc/2 ---- // TEST[continued] -Later the `logs` data stream <>. -This creates a new backing index: `.ds-logs-000003`. Because the user still has -the `read` privilege for the `logs` data stream, the user can retrieve documents -directly from `.ds-logs-000003`: +Later `my-data-stream` <>. This +creates a new backing index: `.ds-my-data-stream-000003`. 
Because the user still +has the `read` privilege for `my-data-stream`, the user can retrieve +documents directly from `.ds-my-data-stream-000003`: //// [source,console] ---- -POST /logs/_rollover/ +POST /my-data-stream/_rollover/ -PUT /logs/_create/2?refresh=wait_for +PUT /my-data-stream/_create/2?refresh=wait_for { "@timestamp": "2020-12-07T11:06:07.000Z" } @@ -80,7 +81,7 @@ PUT /logs/_create/2?refresh=wait_for [source,console] ---- -GET /.ds-logs-000003/_doc/2 +GET /.ds-my-data-stream-000003/_doc/2 ---- // TEST[continued] diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java index bddea56afb376..1675708122ef1 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/aggregations/metrics/TDigestPreAggregatedPercentileRanksAggregatorTests.java @@ -94,7 +94,7 @@ public void testSimple() throws IOException { MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", true, Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - PercentileRanks ranks = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); Iterator rankIterator = ranks.iterator(); Percentile rank = rankIterator.next(); assertEquals(0.1, rank.getValue(), 0d); diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java index f594422d571a6..d46b75ad31f1d 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorTests.java @@ -531,7 +531,7 @@ private InternalAggregation collect(AggregationBuilder builder, Query query, try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = newSearcher(indexReader, true, true); - return search(indexSearcher, query, builder, fields); + return searchAndReduce(indexSearcher, query, builder, fields); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/TermsGroupSourceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/TermsGroupSourceTests.java index 0ceda5815c9f0..b2bf314559b85 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/TermsGroupSourceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/pivot/TermsGroupSourceTests.java @@ -48,9 +48,4 @@ protected Reader instanceReader() { return TermsGroupSource::new; } - public void testSupportsIncrementalBucketUpdate() { - TermsGroupSource terms = randomTermsGroupSource(); - assertEquals(terms.getScriptConfig() == null, terms.supportsIncrementalBucketUpdate()); - } - } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index 776b83afad850..ceac4f1e21a0b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -8,6 +8,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.index.IndexRequest; @@ -87,7 +88,9 @@ public void run(String modelId) { inferTestDocs(localModel, testDocsIterator); } } catch (Exception e) { - throw ExceptionsHelper.serverError("[{}] failed running inference on model [{}]", e, config.getId(), modelId); + LOGGER.error(new ParameterizedMessage("[{}] Error during inference against model [{}]", config.getId(), modelId), e); + throw ExceptionsHelper.serverError("[{}] failed running inference on model [{}]; cause was [{}]", e, config.getId(), modelId, + e.getMessage()); } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 35eeece14f0cb..b603e1b40280e 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -654,7 +654,7 @@ protected void doNextSearch(SearchRequest request, ActionListener { + long originLong = DateFieldType.parseToLong( + origin, + true, + null, + dateTimeFormatter.toDateMathParser(), + now, + DateFieldMapper.Resolution.MILLISECONDS + ); + TimeValue pivotTime = TimeValue.parseTimeValue(pivot, "distance_feature.pivot"); + return new LongScriptFieldDistanceFeatureQuery( + script, + leafFactory(context.lookup())::newInstance, + name(), + originLong, + pivotTime.getMillis(), + boost + ); + }); + } + @Override public Query existsQuery(QueryShardContext context) { checkAllowExpensiveQueries(context); diff --git a/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java new file mode 100644 index 0000000000000..91be5f6a2e15e --- /dev/null +++ b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+package org.elasticsearch.xpack.runtimefields.query;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.QueryVisitor;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.Weight;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.xpack.runtimefields.AbstractLongScriptFieldScript;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Set;
+
+public final class LongScriptFieldDistanceFeatureQuery extends AbstractScriptFieldQuery {
+    private final CheckedFunction<LeafReaderContext, AbstractLongScriptFieldScript, IOException> leafFactory;
+    private final long origin;
+    private final long pivot;
+    private final float boost;
+
+    public LongScriptFieldDistanceFeatureQuery(
+        Script script,
+        CheckedFunction<LeafReaderContext, AbstractLongScriptFieldScript, IOException> leafFactory,
+        String fieldName,
+        long origin,
+        long pivot,
+        float boost
+    ) {
+        super(script, fieldName);
+        this.leafFactory = leafFactory;
+        this.origin = origin;
+        this.pivot = pivot;
+        this.boost = boost;
+    }
+
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+        return new Weight(this) {
+            @Override
+            public boolean isCacheable(LeafReaderContext ctx) {
+                return false;
+            }
+
+            @Override
+            public void extractTerms(Set<Term> terms) {}
+
+            @Override
+            public Scorer scorer(LeafReaderContext context) throws IOException {
+                return new DistanceScorer(this, leafFactory.apply(context), context.reader().maxDoc());
+            }
+
+            @Override
+            public Explanation explain(LeafReaderContext context, int doc) throws IOException {
+                AbstractLongScriptFieldScript script = leafFactory.apply(context);
+                script.runForDoc(doc);
+                long value = valueWithMinAbsoluteDistance(script);
+                float score = score(distanceFor(value));
+                return Explanation.match(
+                    score,
+                    "Distance score, computed as weight * pivot / (pivot + abs(value - origin)) from:",
+                    Explanation.match(boost, "weight"),
+                    Explanation.match(pivot, "pivot"),
+                    Explanation.match(origin, "origin"),
+                    Explanation.match(value, "current value")
+                );
+            }
+        };
+    }
+
+    private class DistanceScorer extends Scorer {
+        private final AbstractLongScriptFieldScript script;
+        private final TwoPhaseIterator twoPhase;
+        private final DocIdSetIterator disi;
+
+        protected DistanceScorer(Weight weight, AbstractLongScriptFieldScript script, int maxDoc) {
+            super(weight);
+            this.script = script;
+            twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) {
+                @Override
+                public boolean matches() throws IOException {
+                    script.runForDoc(approximation().docID());
+                    return script.count() > 0;
+                }
+
+                @Override
+                public float matchCost() {
+                    return MATCH_COST;
+                }
+            };
+            disi = TwoPhaseIterator.asDocIdSetIterator(twoPhase);
+        }
+
+        @Override
+        public int docID() {
+            return disi.docID();
+        }
+
+        @Override
+        public DocIdSetIterator iterator() {
+            return disi;
+        }
+
+        @Override
+        public TwoPhaseIterator twoPhaseIterator() {
+            return twoPhase;
+        }
+
+        @Override
+        public float getMaxScore(int upTo) throws IOException {
+            return boost;
+        }
+
+        @Override
+        public float score() throws IOException {
+            if (script.count() == 0) {
+                return 0;
+            }
+            return LongScriptFieldDistanceFeatureQuery.this.score((double) minAbsoluteDistance(script));
+        }
+    }
+
+    long minAbsoluteDistance(AbstractLongScriptFieldScript script) {
+        long minDistance = Long.MAX_VALUE;
+        for (int i = 0; i < script.count(); i++) {
+            minDistance = Math.min(minDistance, distanceFor(script.values()[i]));
+        }
+        return minDistance;
+    }
+
+    long valueWithMinAbsoluteDistance(AbstractLongScriptFieldScript script) {
+        long minDistance = Long.MAX_VALUE;
+        long minDistanceValue = Long.MAX_VALUE;
+        for (int i = 0; i < script.count(); i++) {
+            long distance = distanceFor(script.values()[i]);
+            if (distance < minDistance) {
+                minDistance = distance;
+                minDistanceValue = script.values()[i];
+            }
+        }
+        return minDistanceValue;
+    }
+
+    long distanceFor(long value) {
+        long distance = Math.max(value, origin) - Math.min(value, origin);
+        if (distance < 0) {
+            // The distance doesn't fit into signed long so clamp it to MAX_VALUE
+            return Long.MAX_VALUE;
+        }
+        return distance;
+    }
+
+    float score(double distance) {
+        return (float) (boost * (pivot / (pivot + distance)));
+    }
+
+    @Override
+    public String toString(String field) {
+        StringBuilder b = new StringBuilder();
+        if (false == fieldName().equals(field)) {
+            b.append(fieldName()).append(":");
+        }
+        b.append(getClass().getSimpleName()).append("(origin=").append(origin).append(",pivot=").append(pivot).append(")");
+        return b.toString();
+
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), origin, pivot);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (false == super.equals(obj)) {
+            return false;
+        }
+        LongScriptFieldDistanceFeatureQuery other = (LongScriptFieldDistanceFeatureQuery) obj;
+        return origin == other.origin && pivot == other.pivot;
+    }
+
+    @Override
+    public void visit(QueryVisitor visitor) {
+        // No subclasses contain any Terms because those have to be strings.
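+        // Nothing useful can be passed to QueryVisitor#consumeTerms here: the
+        // values this query matches are computed by the script at search time,
+        // so the query reports itself as an opaque leaf, and only when the
+        // visitor accepts the field it targets.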
+ if (visitor.acceptField(fieldName())) { + visitor.visitLeaf(this); + } + } + + long origin() { + return origin; + } + + long pivot() { + return pivot; + } +} diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/AbstractScriptMappedFieldTypeTestCase.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/AbstractScriptMappedFieldTypeTestCase.java index 684057c9c90af..a44a6b5b5ae28 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/AbstractScriptMappedFieldTypeTestCase.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/AbstractScriptMappedFieldTypeTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.runtimefields.mapper; +import org.apache.lucene.index.IndexReader; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryShardContext; @@ -124,4 +125,8 @@ private void assertQueryOnlyOnText(String queryName, ThrowingRunnable buildQuery ) ); } + + protected String readSource(IndexReader reader, int docId) throws IOException { + return reader.document(docId).getBinaryValue("_source").utf8ToString(); + } } diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java index cebe31f7a5c25..94c1f46001902 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java @@ -12,13 +12,16 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; @@ -59,6 +62,8 @@ import java.util.function.BiConsumer; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -146,18 +151,9 @@ public void testSort() throws IOException { ScriptDateFieldData ifd = simpleMappedFieldType().fielddataBuilder("test", mockContext()::lookup).build(null, null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), - equalTo("{\"timestamp\": [1595432181351]}") - ); - assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), - equalTo("{\"timestamp\": [1595432181354]}") - ); - assertThat( - 
reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), - equalTo("{\"timestamp\": [1595432181356]}") - ); + assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181351]}")); + assertThat(readSource(reader, docs.scoreDocs[1].doc), equalTo("{\"timestamp\": [1595432181354]}")); + assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181356]}")); } } } @@ -192,6 +188,37 @@ public double execute(ExplanationHolder explanation) { } } + public void testDistanceFeatureQuery() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + iw.addDocuments( + List.of( + List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}"))), + List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))), + List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181356, 1]}"))), + List.of(new StoredField("_source", new BytesRef("{\"timestamp\": []}"))) + ) + ); + try (DirectoryReader reader = iw.getReader()) { + IndexSearcher searcher = newSearcher(reader); + Query query = simpleMappedFieldType().distanceFeatureQuery(1595432181354L, "1ms", 1, mockContext()); + TopDocs docs = searcher.search(query, 4); + assertThat(docs.scoreDocs, arrayWithSize(3)); + assertThat(readSource(reader, docs.scoreDocs[0].doc), equalTo("{\"timestamp\": [1595432181354]}")); + assertThat(docs.scoreDocs[0].score, equalTo(1.0F)); + assertThat(readSource(reader, docs.scoreDocs[1].doc), equalTo("{\"timestamp\": [1595432181356, 1]}")); + assertThat((double) docs.scoreDocs[1].score, closeTo(.333, .001)); + assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181351]}")); + assertThat((double) docs.scoreDocs[2].score, closeTo(.250, .001)); + Explanation explanation = query.createWeight(searcher, ScoreMode.TOP_SCORES, 0) + .explain(reader.le, docs.scoreDocs[0].doc); + } + } + } + + public void testDistanceFeatureQueryIsExpensive() throws IOException { + checkExpensiveQuery((ft, ctx) -> ft.distanceFeatureQuery(randomLong(), randomAlphaOfLength(5), randomFloat(), ctx)); + } + @Override public void testExistsQuery() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { @@ -409,7 +436,7 @@ private DateScriptFieldScript.Factory factory(String code) { @Override public void execute() { for (Object timestamp : (List) getSource().get("timestamp")) { - new DateScriptFieldScript.Millis(this).millis((Long) timestamp); + new DateScriptFieldScript.Millis(this).millis(((Number) timestamp).longValue()); } } }; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java index a83ac51c5fc7c..55cda29657285 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/aggregations/metrics/GeoShapeCentroidAggregatorTests.java @@ -65,7 +65,7 @@ public void testEmpty() throws Exception { = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new 
IndexSearcher(reader); - InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -86,12 +86,12 @@ public void testUnmapped() throws Exception { MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("another_field", true, true, Collections.emptyMap()); - InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertNull(result.centroid()); fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap()); - result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertNull(result.centroid()); assertFalse(AggregationInspectionHelper.hasValue(result)); } @@ -116,7 +116,7 @@ public void testUnmappedWithMissing() throws Exception { MappedFieldType fieldType = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("another_field", true, true, Collections.emptyMap()); - InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertThat(result.centroid(), equalTo(expectedCentroid)); assertTrue(AggregationInspectionHelper.hasValue(result)); } @@ -166,6 +166,8 @@ public void testSingleValuedField() throws Exception { compensatedSumWeight.add(weight); } } + // force using a single aggregator to compute the centroid + w.forceMerge(1); GeoPoint expectedCentroid = new GeoPoint(compensatedSumLat.value() / compensatedSumWeight.value(), compensatedSumLon.value() / compensatedSumWeight.value()); assertCentroid(w, expectedCentroid); @@ -179,7 +181,7 @@ private void assertCentroid(RandomIndexWriter w, GeoPoint expectedCentroid) thro .field("field"); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoCentroid result = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoCentroid result = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertEquals("my_agg", result.getName()); GeoPoint centroid = result.centroid(); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java index 6eb076cba6d7b..d5045affa949a 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/metrics/GeoShapeBoundsAggregatorTests.java @@ -61,7 +61,7 @@ public void testEmpty() throws Exception { = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + 
InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -89,7 +89,7 @@ public void testUnmappedFieldWithDocs() throws Exception { = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertTrue(Double.isInfinite(bounds.top)); assertTrue(Double.isInfinite(bounds.bottom)); assertTrue(Double.isInfinite(bounds.posLeft)); @@ -122,7 +122,7 @@ public void testMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertThat(bounds.top, equalTo(lat)); assertThat(bounds.bottom, equalTo(lat)); assertThat(bounds.posLeft, equalTo(lon >= 0 ? lon : Double.POSITIVE_INFINITY)); @@ -149,7 +149,7 @@ public void testInvalidMissing() throws Exception { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, - () -> search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType)); + () -> searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType)); assertThat(exception.getMessage(), startsWith("Unknown geometry type")); } } @@ -204,7 +204,7 @@ public void testRandomShapes() throws Exception { = new GeoShapeWithDocValuesFieldMapper.GeoShapeWithDocValuesFieldType("field", true, true, Collections.emptyMap()); try (IndexReader reader = w.getReader()) { IndexSearcher searcher = new IndexSearcher(reader); - InternalGeoBounds bounds = search(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); assertThat(bounds.top, closeTo(top, GEOHASH_TOLERANCE)); assertThat(bounds.bottom, closeTo(bottom, GEOHASH_TOLERANCE)); assertThat(bounds.posLeft, closeTo(posLeft, GEOHASH_TOLERANCE)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java index 509b3fee5b1d4..09be0407dd1a4 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/querydsl/agg/GroupByKey.java @@ -42,21 +42,21 @@ public final CompositeValuesSourceBuilder asValueSource() { if (script != null) { builder.script(script.toPainless()); if (script.outputType().isInteger()) { - builder.valueType(ValueType.LONG); + builder.userValuetypeHint(ValueType.LONG); } else if (script.outputType().isRational()) { - builder.valueType(ValueType.DOUBLE); + builder.userValuetypeHint(ValueType.DOUBLE); } else if (DataTypes.isString(script.outputType())) { - builder.valueType(ValueType.STRING); + builder.userValuetypeHint(ValueType.STRING); } else if (script.outputType() == DATE) { - 
builder.valueType(ValueType.LONG); + builder.userValuetypeHint(ValueType.LONG); } else if (script.outputType() == TIME) { - builder.valueType(ValueType.LONG); + builder.userValuetypeHint(ValueType.LONG); } else if (script.outputType() == DATETIME) { - builder.valueType(ValueType.LONG); + builder.userValuetypeHint(ValueType.LONG); } else if (script.outputType() == BOOLEAN) { - builder.valueType(ValueType.BOOLEAN); + builder.userValuetypeHint(ValueType.BOOLEAN); } else if (script.outputType() == IP) { - builder.valueType(ValueType.IP); + builder.userValuetypeHint(ValueType.IP); } } // field based diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 0233083ac83d2..0f7ee5842a4bf 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -689,7 +689,7 @@ public void testLikeRLikeAsPainlessScripts() { public void testStartsWithUsesPrefixQuery() { LogicalPlan p = plan("SELECT keyword FROM test WHERE STARTS_WITH(keyword, 'x') OR STARTS_WITH(keyword, 'y')"); - + assertTrue(p instanceof Project); assertTrue(p.children().get(0) instanceof Filter); Expression condition = ((Filter) p.children().get(0)).condition(); @@ -698,7 +698,7 @@ public void testStartsWithUsesPrefixQuery() { QueryTranslation translation = translate(condition); assertTrue(translation.query instanceof BoolQuery); BoolQuery bq = (BoolQuery) translation.query; - + assertFalse(bq.isAnd()); assertTrue(bq.left() instanceof PrefixQuery); assertTrue(bq.right() instanceof PrefixQuery); @@ -706,7 +706,7 @@ public void testStartsWithUsesPrefixQuery() { PrefixQuery pqr = (PrefixQuery) bq.right(); assertEquals("keyword", pqr.field()); assertEquals("y", pqr.query()); - + PrefixQuery pql = (PrefixQuery) bq.left(); assertEquals("keyword", pql.field()); assertEquals("x", pql.query()); @@ -715,7 +715,7 @@ public void testStartsWithUsesPrefixQuery() { public void testStartsWithUsesPrefixQueryAndScript() { LogicalPlan p = plan("SELECT keyword FROM test WHERE STARTS_WITH(keyword, 'x') AND STARTS_WITH(keyword, 'xy') " + "AND STARTS_WITH(LCASE(keyword), 'xyz')"); - + assertTrue(p instanceof Project); assertTrue(p.children().get(0) instanceof Filter); Expression condition = ((Filter) p.children().get(0)).condition(); @@ -724,7 +724,7 @@ public void testStartsWithUsesPrefixQueryAndScript() { QueryTranslation translation = translate(condition); assertTrue(translation.query instanceof BoolQuery); BoolQuery bq = (BoolQuery) translation.query; - + assertTrue(bq.isAnd()); assertTrue(bq.left() instanceof BoolQuery); assertTrue(bq.right() instanceof ScriptQuery); @@ -734,7 +734,7 @@ public void testStartsWithUsesPrefixQueryAndScript() { PrefixQuery pqr = (PrefixQuery) bbq.right(); assertEquals("keyword", pqr.field()); assertEquals("xy", pqr.query()); - + PrefixQuery pql = (PrefixQuery) bbq.left(); assertEquals("keyword", pql.field()); assertEquals("x", pql.query()); @@ -1304,7 +1304,7 @@ public void testGroupByHistogramQueryTranslator() { assertEquals("MAX(int)", eqe.output().get(0).qualifiedName()); assertEquals(INTEGER, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - 
containsString("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + containsString("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"fixed_interval\":\"62208000000ms\",\"time_zone\":\"Z\"}}}]}")); } @@ -1319,7 +1319,7 @@ public void testGroupByHistogramWithScalarsQueryTranslator() { assertEquals("h", eqe.output().get(1).qualifiedName()); assertEquals(DATETIME, eqe.output().get(1).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - containsString("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\"," + + containsString("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true," + "\"order\":\"asc\",\"fixed_interval\":\"139968000000ms\",\"time_zone\":\"Z\"}}}]}")); } @@ -1331,10 +1331,10 @@ public void testGroupByYearQueryTranslator() { assertEquals("YEAR(date)", eqe.output().get(0).qualifiedName()); assertEquals(INTEGER, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"calendar_interval\":\"1y\",\"time_zone\":\"Z\"}}}]}}}")); } - + public void testGroupByOneMonthHistogramQueryTranslator() { PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date, INTERVAL 1 MONTH) AS h FROM test GROUP BY h"); assertEquals(EsQueryExec.class, p.getClass()); @@ -1343,10 +1343,10 @@ public void testGroupByOneMonthHistogramQueryTranslator() { assertEquals("h", eqe.output().get(0).qualifiedName()); assertEquals(DATETIME, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"calendar_interval\":\"1M\",\"time_zone\":\"Z\"}}}]}}}")); } - + public void testGroupByMoreMonthsHistogramQueryTranslator() { PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date, INTERVAL 5 MONTH) AS h FROM test GROUP BY h"); assertEquals(EsQueryExec.class, p.getClass()); @@ -1355,7 +1355,7 @@ public void testGroupByMoreMonthsHistogramQueryTranslator() { assertEquals("h", eqe.output().get(0).qualifiedName()); assertEquals(DATETIME, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"fixed_interval\":\"12960000000ms\",\"time_zone\":\"Z\"}}}]}}}")); } @@ -1367,10 +1367,10 @@ public void testGroupByOneDayHistogramQueryTranslator() { assertEquals("h", eqe.output().get(0).qualifiedName()); assertEquals(DATETIME, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"calendar_interval\":\"1d\",\"time_zone\":\"Z\"}}}]}}}")); } - 
+ public void testGroupByMoreDaysHistogramQueryTranslator() { PhysicalPlan p = optimizeAndPlan("SELECT HISTOGRAM(date, INTERVAL '1 5' DAY TO HOUR) AS h FROM test GROUP BY h"); assertEquals(EsQueryExec.class, p.getClass()); @@ -1379,7 +1379,7 @@ public void testGroupByMoreDaysHistogramQueryTranslator() { assertEquals("h", eqe.output().get(0).qualifiedName()); assertEquals(DATETIME, eqe.output().get(0).dataType()); assertThat(eqe.queryContainer().aggs().asAggBuilder().toString().replaceAll("\\s+", ""), - endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"value_type\":\"date\",\"order\":\"asc\"," + endsWith("\"date_histogram\":{\"field\":\"date\",\"missing_bucket\":true,\"order\":\"asc\"," + "\"fixed_interval\":\"104400000ms\",\"time_zone\":\"Z\"}}}]}}}")); } diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/DataHistogramGroupByIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/DataHistogramGroupByIT.java index d6bc9a2c5613c..c543ac433ac9b 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/DataHistogramGroupByIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/DataHistogramGroupByIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.transform.integration.continuous; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -37,6 +38,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/60781") public class DataHistogramGroupByIT extends ContinuousTestCase { private static final String NAME = "continuous-date-histogram-pivot-test"; private static final String MISSING_BUCKET_KEY = ContinuousTestCase.STRICT_DATE_OPTIONAL_TIME_PRINTER_NANOS.withZone(ZoneId.of("UTC")) diff --git a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java index 0b1c189d9c398..4827cb5f5b591 100644 --- a/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java +++ b/x-pack/plugin/transform/qa/single-node-tests/src/test/java/org/elasticsearch/xpack/transform/integration/continuous/TermsGroupByIT.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.transform.integration.continuous; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -33,6 +34,7 @@ import static org.hamcrest.Matchers.equalTo; +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/60781") public class TermsGroupByIT extends ContinuousTestCase { private static final String NAME = "continuous-terms-pivot-test"; From f788941140488bb51c7950ac11797ab8a606b6b9 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 6 Aug 2020 17:34:57 -0400 Subject: [PATCH 2/3] Finish 
test --- .../LongScriptFieldDistanceFeatureQuery.java | 17 ++++++++++------- .../mapper/ScriptDateMappedFieldTypeTests.java | 9 +++++++-- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java index 91be5f6a2e15e..ce04cd43bd4d8 100644 --- a/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java +++ b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java @@ -58,7 +58,7 @@ public void extractTerms(Set terms) {} @Override public Scorer scorer(LeafReaderContext context) throws IOException { - return new DistanceScorer(this, leafFactory.apply(context), context.reader().maxDoc()); + return new DistanceScorer(this, leafFactory.apply(context), context.reader().maxDoc(), boost); } @Override @@ -66,11 +66,12 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOExceptio AbstractLongScriptFieldScript script = leafFactory.apply(context); script.runForDoc(doc); long value = valueWithMinAbsoluteDistance(script); - float score = score(distanceFor(value)); + float weight = LongScriptFieldDistanceFeatureQuery.this.boost * boost; + float score = score(weight, distanceFor(value)); return Explanation.match( score, "Distance score, computed as weight * pivot / (pivot + abs(value - origin)) from:", - Explanation.match(boost, "weight"), + Explanation.match(weight, "weight"), Explanation.match(pivot, "pivot"), Explanation.match(origin, "origin"), Explanation.match(value, "current value") @@ -83,8 +84,9 @@ private class DistanceScorer extends Scorer { private final AbstractLongScriptFieldScript script; private final TwoPhaseIterator twoPhase; private final DocIdSetIterator disi; + private final float weight; - protected DistanceScorer(Weight weight, AbstractLongScriptFieldScript script, int maxDoc) { + protected DistanceScorer(Weight weight, AbstractLongScriptFieldScript script, int maxDoc, float boost) { super(weight); this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @@ -100,6 +102,7 @@ public float matchCost() { } }; disi = TwoPhaseIterator.asDocIdSetIterator(twoPhase); + this.weight = LongScriptFieldDistanceFeatureQuery.this.boost * boost; } @Override @@ -127,7 +130,7 @@ public float score() throws IOException { if (script.count() == 0) { return 0; } - return LongScriptFieldDistanceFeatureQuery.this.score((double) minAbsoluteDistance(script)); + return LongScriptFieldDistanceFeatureQuery.this.score(weight, (double) minAbsoluteDistance(script)); } } @@ -161,8 +164,8 @@ long distanceFor(long value) { return distance; } - float score(double distance) { - return (float) (boost * (pivot / (pivot + distance))); + float score(float weight, double distance) { + return (float) (weight * (pivot / (pivot + distance))); } @Override diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java index 94c1f46001902..168aee9e6e666 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java 
+++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/mapper/ScriptDateMappedFieldTypeTests.java @@ -209,8 +209,13 @@ public void testDistanceFeatureQuery() throws IOException { assertThat((double) docs.scoreDocs[1].score, closeTo(.333, .001)); assertThat(readSource(reader, docs.scoreDocs[2].doc), equalTo("{\"timestamp\": [1595432181351]}")); assertThat((double) docs.scoreDocs[2].score, closeTo(.250, .001)); - Explanation explanation = query.createWeight(searcher, ScoreMode.TOP_SCORES, 0) - .explain(reader.le, docs.scoreDocs[0].doc); + Explanation explanation = query.createWeight(searcher, ScoreMode.TOP_SCORES, 1.0F) + .explain(reader.leaves().get(0), docs.scoreDocs[0].doc); + assertThat(explanation.toString(), containsString("1.0 = Distance score, computed as weight * pivot / (pivot")); + assertThat(explanation.toString(), containsString("1.0 = weight")); + assertThat(explanation.toString(), containsString("1 = pivot")); + assertThat(explanation.toString(), containsString("1595432181354 = origin")); + assertThat(explanation.toString(), containsString("1595432181354 = current value")); } } } From 9739a0c95eae2c0cd98c46d71dd93133026df4ac Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 13 Aug 2020 15:43:41 -0400 Subject: [PATCH 3/3] Update tests --- .../LongScriptFieldDistanceFeatureQuery.java | 11 +- ...bstractDoubleScriptFieldQueryTestCase.java | 29 +--- .../AbstractIpScriptFieldQueryTestCase.java | 28 +--- .../AbstractLongScriptFieldQueryTestCase.java | 29 +--- .../AbstractScriptFieldQueryTestCase.java | 33 +++- ...gScriptFieldDistanceFeatureQueryTests.java | 142 ++++++++++++++++++ 6 files changed, 186 insertions(+), 86 deletions(-) create mode 100644 x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQueryTests.java diff --git a/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java index ce04cd43bd4d8..d91d6b98240c2 100644 --- a/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java +++ b/x-pack/plugin/runtime-fields/src/main/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQuery.java @@ -122,7 +122,7 @@ public TwoPhaseIterator twoPhaseIterator() { @Override public float getMaxScore(int upTo) throws IOException { - return boost; + return weight; } @Override @@ -174,7 +174,10 @@ public String toString(String field) { if (false == fieldName().equals(field)) { b.append(fieldName()).append(":"); } - b.append(getClass().getSimpleName()).append("(origin=").append(origin).append(",pivot=").append(pivot).append(")"); + b.append(getClass().getSimpleName()); + b.append("(origin=").append(origin); + b.append(",pivot=").append(pivot); + b.append(",boost=").append(boost).append(")"); return b.toString(); } @@ -208,4 +211,8 @@ long origin() { long pivot() { return pivot; } + + float boost() { + return boost; + } } diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractDoubleScriptFieldQueryTestCase.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractDoubleScriptFieldQueryTestCase.java index 59bd856c3cfd6..ce05013318771 100644 --- 
a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractDoubleScriptFieldQueryTestCase.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractDoubleScriptFieldQueryTestCase.java @@ -6,17 +6,8 @@ package org.elasticsearch.xpack.runtimefields.query; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryVisitor; -import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.xpack.runtimefields.DoubleScriptFieldScript; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public abstract class AbstractDoubleScriptFieldQueryTestCase extends @@ -26,24 +17,6 @@ public abstract class AbstractDoubleScriptFieldQueryTestCase leavesVisited = new ArrayList<>(); - query.visit(new QueryVisitor() { - @Override - public void consumeTerms(Query query, Term... terms) { - fail(); - } - - @Override - public void consumeTermsMatching(Query query, String field, Supplier automaton) { - fail(); - } - - @Override - public void visitLeaf(Query query) { - leavesVisited.add(query); - } - }); - assertThat(leavesVisited, equalTo(List.of(query))); + assertEmptyVisit(); } } diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractIpScriptFieldQueryTestCase.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractIpScriptFieldQueryTestCase.java index 1f2ec87024896..d57b8820752d8 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractIpScriptFieldQueryTestCase.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractIpScriptFieldQueryTestCase.java @@ -7,19 +7,11 @@ package org.elasticsearch.xpack.runtimefields.query; import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.xpack.runtimefields.IpScriptFieldScript; import java.net.InetAddress; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; -import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public abstract class AbstractIpScriptFieldQueryTestCase extends AbstractScriptFieldQueryTestCase { @@ -28,25 +20,7 @@ public abstract class AbstractIpScriptFieldQueryTestCase leavesVisited = new ArrayList<>(); - query.visit(new QueryVisitor() { - @Override - public void consumeTerms(Query query, Term... 
terms) { - fail(); - } - - @Override - public void consumeTermsMatching(Query query, String field, Supplier automaton) { - fail(); - } - - @Override - public void visitLeaf(Query query) { - leavesVisited.add(query); - } - }); - assertThat(leavesVisited, equalTo(List.of(query))); + assertEmptyVisit(); } protected static BytesRef encode(InetAddress addr) { diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractLongScriptFieldQueryTestCase.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractLongScriptFieldQueryTestCase.java index 82a3543321fc8..7958d906a8594 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractLongScriptFieldQueryTestCase.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractLongScriptFieldQueryTestCase.java @@ -7,19 +7,10 @@ package org.elasticsearch.xpack.runtimefields.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.QueryVisitor; -import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.xpack.runtimefields.AbstractLongScriptFieldScript; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.function.Supplier; - -import static org.hamcrest.Matchers.equalTo; public abstract class AbstractLongScriptFieldQueryTestCase extends AbstractScriptFieldQueryTestCase< T> { @@ -27,24 +18,6 @@ public abstract class AbstractLongScriptFieldQueryTestCase leavesVisited = new ArrayList<>(); - query.visit(new QueryVisitor() { - @Override - public void consumeTerms(Query query, Term... 
terms) { - fail(); - } - - @Override - public void consumeTermsMatching(Query query, String field, Supplier automaton) { - fail(); - } - - @Override - public void visitLeaf(Query query) { - leavesVisited.add(query); - } - }); - assertThat(leavesVisited, equalTo(List.of(query))); + assertEmptyVisit(); } } diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractScriptFieldQueryTestCase.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractScriptFieldQueryTestCase.java index 743450cd5f933..b8ad9406b8d00 100644 --- a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractScriptFieldQueryTestCase.java +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/AbstractScriptFieldQueryTestCase.java @@ -6,10 +6,19 @@ package org.elasticsearch.xpack.runtimefields.query; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.script.Script; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + import static org.hamcrest.Matchers.equalTo; public abstract class AbstractScriptFieldQueryTestCase extends ESTestCase { @@ -27,7 +36,7 @@ public final void testEqualsAndHashCode() { EqualsHashCodeTestUtils.checkEqualsAndHashCode(createTestInstance(), this::copy, this::mutate); } - public abstract void testMatches(); + public abstract void testMatches() throws IOException; public final void testToString() { T query = createTestInstance(); @@ -38,4 +47,26 @@ public final void testToString() { protected abstract void assertToString(T query); public abstract void testVisit(); + + protected final void assertEmptyVisit() { + T query = createTestInstance(); + List leavesVisited = new ArrayList<>(); + query.visit(new QueryVisitor() { + @Override + public void consumeTerms(Query query, Term... terms) { + fail(); + } + + @Override + public void consumeTermsMatching(Query query, String field, Supplier automaton) { + fail(); + } + + @Override + public void visitLeaf(Query query) { + leavesVisited.add(query); + } + }); + assertThat(leavesVisited, equalTo(List.of(query))); + } } diff --git a/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQueryTests.java b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQueryTests.java new file mode 100644 index 0000000000000..160305b400813 --- /dev/null +++ b/x-pack/plugin/runtime-fields/src/test/java/org/elasticsearch/xpack/runtimefields/query/LongScriptFieldDistanceFeatureQueryTests.java @@ -0,0 +1,142 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+
+package org.elasticsearch.xpack.runtimefields.query;
+
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.CheckedFunction;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.search.lookup.SearchLookup;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.runtimefields.AbstractLongScriptFieldScript;
+import org.elasticsearch.xpack.runtimefields.DateScriptFieldScript;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFieldQueryTestCase<LongScriptFieldDistanceFeatureQuery> {
+    private final CheckedFunction<LeafReaderContext, AbstractLongScriptFieldScript, IOException> leafFactory = ctx -> null;
+
+    @Override
+    protected LongScriptFieldDistanceFeatureQuery createTestInstance() {
+        long origin = randomLong();
+        // Pick a pivot distinct from the origin so mutate below always has room to move
+        long pivot = randomValueOtherThan(origin, ESTestCase::randomLong);
+        return new LongScriptFieldDistanceFeatureQuery(randomScript(), leafFactory, randomAlphaOfLength(5), origin, pivot, randomFloat());
+    }
+
+    @Override
+    protected LongScriptFieldDistanceFeatureQuery copy(LongScriptFieldDistanceFeatureQuery orig) {
+        return new LongScriptFieldDistanceFeatureQuery(
+            orig.script(),
+            leafFactory,
+            orig.fieldName(),
+            orig.origin(),
+            orig.pivot(),
+            orig.boost()
+        );
+    }
+
+    @Override
+    protected LongScriptFieldDistanceFeatureQuery mutate(LongScriptFieldDistanceFeatureQuery orig) {
+        Script script = orig.script();
+        String fieldName = orig.fieldName();
+        long origin = orig.origin();
+        long pivot = orig.pivot();
+        float boost = orig.boost();
+        switch (randomInt(4)) {
+            case 0:
+                script = randomValueOtherThan(script, this::randomScript);
+                break;
+            case 1:
+                fieldName += "modified";
+                break;
+            case 2:
+                // The new origin must differ from the old origin and stay distinct from the pivot
+                origin = randomValueOtherThan(origin, () -> randomValueOtherThan(orig.pivot(), ESTestCase::randomLong));
+                break;
+            case 3:
+                // The new pivot must differ from the old pivot and stay distinct from the origin
+                pivot = randomValueOtherThan(pivot, () -> randomValueOtherThan(orig.origin(), ESTestCase::randomLong));
+                break;
+            case 4:
+                boost = randomValueOtherThan(boost, ESTestCase::randomFloat);
+                break;
+            default:
+                fail();
+        }
+        return new LongScriptFieldDistanceFeatureQuery(script, leafFactory, fieldName, origin, pivot, boost);
+    }
+
+    @Override
+    public void testMatches() throws IOException {
+        try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
+            iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181354]}"))));
+            iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))));
+            try (DirectoryReader reader = iw.getReader()) {
+                IndexSearcher searcher = newSearcher(reader);
+                CheckedFunction<LeafReaderContext, AbstractLongScriptFieldScript, IOException> leafFactory =
+                    ctx -> new DateScriptFieldScript(Map.of(), new SearchLookup(null, null), ctx) {
+                        @Override
+                        public void execute() {
+                            // Emit every timestamp in the source as a millis value for the runtime field
+                            for (Object timestamp : (List<?>) getSource().get("timestamp")) {
+                                new DateScriptFieldScript.Millis(this).millis(((Number) timestamp).longValue());
+                            }
+                        }
+                    };
+                LongScriptFieldDistanceFeatureQuery query = new LongScriptFieldDistanceFeatureQuery(
+                    randomScript(),
+                    leafFactory,
+                    "test",
+                    1595432181351L,
+                    6L,
+                    between(1, 100)
+                );
+                TopDocs td = searcher.search(query, 1);
+                // The document whose timestamp equals the origin is at distance 0, so it earns the full boost
+                assertThat(td.scoreDocs[0].score, equalTo(query.boost()));
+                assertThat(td.scoreDocs[0].doc, equalTo(1));
+            }
+        }
+    }
+
+    public void testMaxScore() throws IOException {
+        try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
+            iw.addDocument(List.of());
+            try (DirectoryReader reader = iw.getReader()) {
+                IndexSearcher searcher = newSearcher(reader);
+                LongScriptFieldDistanceFeatureQuery query = createTestInstance();
+                float boost = randomFloat();
+                assertThat(
+                    query.createWeight(searcher, ScoreMode.COMPLETE, boost).scorer(reader.leaves().get(0)).getMaxScore(randomInt()),
+                    equalTo(query.boost() * boost)
+                );
+            }
+        }
+    }
+
+    @Override
+    protected void assertToString(LongScriptFieldDistanceFeatureQuery query) {
+        assertThat(
+            query.toString(query.fieldName()),
+            equalTo(
+                "LongScriptFieldDistanceFeatureQuery(origin=" + query.origin() + ",pivot=" + query.pivot() + ",boost=" + query.boost() + ")"
+            )
+        );
+    }
+
+    @Override
+    public final void testVisit() {
+        assertEmptyVisit();
+    }
+}
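
Reviewer note on the scoring that testMatches and testMaxScore pin down: assuming this query follows Lucene's usual distance-feature formula, each document scores boost * pivot / (pivot + |value - origin|), so a value exactly at the origin earns the full boost and scores decay toward zero as values move away. The standalone sketch below is illustrative only and not part of the patch; the class and method names are invented for the example.

// Standalone sketch of the assumed distance-feature formula; not patch code.
public class DistanceFeatureScoreSketch {
    // score = boost * pivot / (pivot + |value - origin|),
    // ignoring long overflow on the subtraction for the purposes of this sketch.
    static float score(float boost, long pivot, long origin, long value) {
        long distance = Math.abs(value - origin);
        return (float) (boost * (pivot / (pivot + (double) distance)));
    }

    public static void main(String[] args) {
        long origin = 1595432181351L; // origin used by testMatches
        long pivot = 6L;              // pivot used by testMatches
        float boost = 42.0f;          // stands in for the between(1, 100) draw

        // Distance 0: the score saturates at the full boost, which is why
        // testMatches expects td.scoreDocs[0].score == query.boost() for doc 1.
        System.out.println(score(boost, pivot, origin, 1595432181351L)); // 42.0

        // 3ms away: boost * 6 / (6 + 3), i.e. two thirds of the boost.
        System.out.println(score(boost, pivot, origin, 1595432181354L)); // 28.0
    }
}

Under that formula the testMaxScore expectation also lines up: the best possible case is distance 0, so the scorer's max score is query.boost() multiplied by the external boost passed to createWeight.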