diff --git a/.backportrc.json b/.backportrc.json
index 03f3f892f9227..20287f0bfc0e6 100644
--- a/.backportrc.json
+++ b/.backportrc.json
@@ -1,10 +1,10 @@
 {
   "upstream" : "elastic/elasticsearch",
-  "targetBranchChoices" : [ "main", "8.x", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
+  "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
   "targetPRLabels" : [ "backport" ],
   "branchLabelMapping" : {
     "^v9.0.0$" : "main",
-    "^v8.17.0$" : "8.x",
+    "^v8.18.0$" : "8.x",
     "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2"
   }
 }
diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml
index 19e99852869e6..5be5990cfb203 100644
--- a/.buildkite/pipelines/intake.yml
+++ b/.buildkite/pipelines/intake.yml
@@ -56,7 +56,7 @@ steps:
         timeout_in_minutes: 300
         matrix:
           setup:
-            BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"]
+            BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"]
         agents:
           provider: gcp
           image: family/elasticsearch-ubuntu-2004
diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml
index 081d059460653..1a1e46d55f7a4 100644
--- a/.buildkite/pipelines/periodic-packaging.template.yml
+++ b/.buildkite/pipelines/periodic-packaging.template.yml
@@ -8,6 +8,7 @@ steps:
           setup:
             image:
               - debian-11
+              - debian-12
               - opensuse-leap-15
               - oraclelinux-7
               - oraclelinux-8
diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml
index 7dd8269f4ffe6..a49e486176484 100644
--- a/.buildkite/pipelines/periodic-packaging.yml
+++ b/.buildkite/pipelines/periodic-packaging.yml
@@ -9,6 +9,7 @@ steps:
           setup:
             image:
               - debian-11
+              - debian-12
               - opensuse-leap-15
               - oraclelinux-7
               - oraclelinux-8
@@ -288,8 +289,8 @@ steps:
         env:
           BWC_VERSION: 8.15.4

-      - label: "{{matrix.image}} / 8.16.1 / packaging-tests-upgrade"
-        command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.1
+      - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade"
+        command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2
         timeout_in_minutes: 300
         matrix:
           setup:
@@ -302,7 +303,7 @@ steps:
           machineType: custom-16-32768
           buildDirectory: /dev/shm/bk
         env:
-          BWC_VERSION: 8.16.1
+          BWC_VERSION: 8.16.2

       - label: "{{matrix.image}} / 8.17.0 / packaging-tests-upgrade"
         command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0
@@ -320,6 +321,22 @@ steps:
         env:
           BWC_VERSION: 8.17.0

+      - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade"
+        command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0
+        timeout_in_minutes: 300
+        matrix:
+          setup:
+            image:
+              - rocky-8
+              - ubuntu-2004
+        agents:
+          provider: gcp
+          image: family/elasticsearch-{{matrix.image}}
+          machineType: custom-16-32768
+          buildDirectory: /dev/shm/bk
+        env:
+          BWC_VERSION: 8.18.0
+
       - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade"
         command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0
         timeout_in_minutes: 300
diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml
index f9f75488f0917..79e5a2e8dcdbb 100644
--- a/.buildkite/pipelines/periodic-platform-support.yml
+++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -8,6 +8,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 79371d6ddccf5..aa1db893df8cc 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.1#bwcTest + - label: 8.16.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.2#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.1 + BWC_VERSION: 8.16.2 retry: automatic: - exit_status: "-1" @@ -344,6 +344,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.18.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.18.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 @@ -429,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -471,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 04ccc41891b3b..8bec706bb758d 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -11,6 +11,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -38,6 +39,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -65,6 +67,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 85522e47a523f..a8d6dda4fb0c2 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -15,6 +15,7 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.4" - - "8.16.1" + - "8.16.2" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 9ea3072021bb3..5514fc376a285 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: - - "8.16.1" + - "8.16.2" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java index 3790be5f279d1..d44586ef4901a 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/script/ScriptScoreBenchmark.java @@ -34,6 +34,7 @@ import 
org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.script.DocReader; @@ -76,8 +77,7 @@ public class ScriptScoreBenchmark { private final PluginsService pluginsService = new PluginsService( Settings.EMPTY, null, - null, - Path.of(System.getProperty("plugins.dir")) + new PluginsLoader(null, Path.of(System.getProperty("plugins.dir"))) ); private final ScriptModule scriptModule = new ScriptModule(Settings.EMPTY, pluginsService.filterPlugins(ScriptPlugin.class).toList()); diff --git a/branches.json b/branches.json index e81d511a88458..0e23a795664dd 100644 --- a/branches.json +++ b/branches.json @@ -7,6 +7,9 @@ { "branch": "8.16" }, + { + "branch": "8.17" + }, { "branch": "8.x" }, diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy index 94df02b280ca6..a4635a7232754 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy @@ -9,7 +9,6 @@ package org.elasticsearch.gradle.internal - import spock.lang.Unroll import com.github.tomakehurst.wiremock.WireMockServer @@ -24,8 +23,7 @@ import java.nio.file.Paths import java.util.regex.Matcher import java.util.regex.Pattern -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_ADOPTIUM -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_OPENJDK +import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.* class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { @@ -33,13 +31,11 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static final String ADOPT_JDK_VERSION = "12.0.2+10" private static final String ADOPT_JDK_VERSION_11 = "11.0.10+9" private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7" + private static final String AZUL_JDK_VERSION_8 = "8u302+b08" + private static final String AZUL_8_DISTRO_VERSION = "8.56.0.23" private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)") - def setup() { - configurationCacheCompatible = false - } - @Unroll def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { given: @@ -56,14 +52,16 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { version = '$jdkVersion' platform = "$platform" architecture = '$arch' + distributionVersion = '$distributionVersion' } } - def theJdks = jdks +// def theJdks = jdks tasks.register("getJdk") { dependsOn jdks.myJdk + def jdk = jdks.myJdk doLast { - println "JDK HOME: " + theJdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -78,22 +76,23 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assertExtraction(result.output, expectedJavaBin); where: - platform | arch | jdkVendor | jdkVersion | expectedJavaBin | suffix - "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | 
VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | "bin/java" | "(jdk 11)" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | "bin/java" | "(jdk 15)" + platform | arch | jdkVendor | jdkVersion | distributionVersion | expectedJavaBin | suffix + "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | null | "bin/java" | "(jdk 11)" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | null | "bin/java" | "(jdk 15)" + "darwin" | "aarch64" | VENDOR_ZULU | AZUL_JDK_VERSION_8 | AZUL_8_DISTRO_VERSION | "Contents/Home/bin/java" | "(jdk 8)" } def "transforms are reused across projects"() { @@ -118,9 +117,10 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { } } tasks.register("getJdk") { - dependsOn jdks.myJdk + def jdk = jdks.myJdk + dependsOn jdk doLast { - println "JDK HOME: " + jdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -137,7 +137,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { result.output.count("Unpacking linux-12.0.2-x64.tar.gz using ${SymbolicLinkPreservingUntarTransform.simpleName}") == 1 where: - platform | jdkVendor | jdkVersion | expectedJavaBin + platform | jdkVendor | jdkVersion | expectedJavaBin "linux" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" } @@ -159,6 +159,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { vendor = '$VENDOR_ADOPTIUM' version = 
'$ADOPT_JDK_VERSION' platform = "$platform" + distributionVersion = '$ADOPT_JDK_VERSION' architecture = "x64" } } @@ -204,6 +205,8 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assert matcher.find() == true; String jdkHome = matcher.group(1); Path javaPath = Paths.get(jdkHome, javaBin); + println "canonical " + javaPath.toFile().getCanonicalPath() + Paths.get(jdkHome).toFile().listFiles().each { println it } assert Files.exists(javaPath) == true; true } @@ -221,15 +224,26 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform); return "/java/GA/" + versionPath + "/GPL/" + filename; + } else if (vendor.equals(VENDOR_ZULU)) { + // we only have a single version of zulu currently in the tests + return "/zulu/bin/zulu8.56.0.23-ca-jdk8.0.302-macosx_aarch64.tar.gz" } } private static byte[] filebytes(final String vendor, final String platform) throws IOException { final String effectivePlatform = getPlatform(vendor, platform); if (vendor.equals(VENDOR_ADOPTIUM)) { - return JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_adoptium_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_adoptium_" + effectivePlatform + "." + extension(platform) + ).getBytes() } else if (vendor.equals(VENDOR_OPENJDK)) { - JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_openjdk_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_openjdk_" + effectivePlatform + "." + extension(platform) + ).getBytes() + } else { + // zulu + String resourcePath = "fake_zulu_" + effectivePlatform + "." 
+ extension(platform) + return JdkDownloadPluginFuncTest.class.getResourceAsStream(resourcePath).getBytes() } } diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz new file mode 100644 index 0000000000000..87361b67ec76c Binary files /dev/null and b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz differ diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 493f7a505bb5b..3c9cf121813c9 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -25,12 +25,12 @@ if (buildParams.inFipsJvm) { File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename) File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy') File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks') - def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.4') - def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17') + def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5') + def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19') def manualDebug = false; //change this to manually debug bouncy castle in an IDE if(manualDebug) { - bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.4') - bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17'){ + bcFips = dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5') + bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){ exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy index ad37fa9f02c8c..6c87149095186 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntFixtureStop.groovy @@ -15,16 +15,12 @@ import org.elasticsearch.gradle.internal.test.AntFixture import org.gradle.api.file.FileSystemOperations import org.gradle.api.file.ProjectLayout import org.gradle.api.provider.ProviderFactory -import org.gradle.api.tasks.Internal import org.gradle.process.ExecOperations import javax.inject.Inject abstract class AntFixtureStop extends LoggedExec implements FixtureStop { - @Internal - AntFixture fixture - @Inject AntFixtureStop(ProjectLayout projectLayout, ExecOperations execOperations, @@ -34,12 +30,12 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop { } void setFixture(AntFixture fixture) { - assert this.fixture == null - this.fixture = fixture; - final Object pid = "${-> this.fixture.pid}" - onlyIf("pidFile exists") { fixture.pidFile.exists() } + def pidFile = fixture.pidFile + def fixtureName = fixture.name + final Object pid = "${-> Integer.parseInt(pidFile.getText('UTF-8').trim())}" + onlyIf("pidFile exists") { pidFile.exists() } doFirst { - logger.info("Shutting down ${fixture.name} with pid ${pid}") + logger.info("Shutting down ${fixtureName} with pid ${pid}") } if (OS.current() == OS.WINDOWS) { @@ -51,9 +47,8 @@ abstract class AntFixtureStop extends LoggedExec implements FixtureStop 
{ } doLast { fileSystemOperations.delete { - it.delete(fixture.pidFile) + it.delete(pidFile) } } - this.fixture = fixture } } diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy index 81f21f8c62d86..01a3bdaee2337 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/AntTask.groovy @@ -29,11 +29,6 @@ import java.nio.charset.Charset */ public abstract class AntTask extends DefaultTask { - /** - * A buffer that will contain the output of the ant code run, - * if the output was not already written directly to stdout. - */ - public final ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() @Inject protected FileSystemOperations getFileSystemOperations() { @@ -57,6 +52,11 @@ public abstract class AntTask extends DefaultTask { // otherwise groovy replaces System.out, and you have no chance to debug // ant.saveStreams = false + /** + * A buffer that will contain the output of the ant code run, + * if the output was not already written directly to stdout. + */ + ByteArrayOutputStream outputBuffer = new ByteArrayOutputStream() final int outputLevel = logger.isDebugEnabled() ? Project.MSG_DEBUG : Project.MSG_INFO final PrintStream stream = useStdout() ? System.out : new PrintStream(outputBuffer, true, Charset.defaultCharset().name()) diff --git a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy index f2837ff40fb79..88a68f1194858 100644 --- a/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy +++ b/build-tools-internal/src/main/groovy/org/elasticsearch/gradle/internal/test/AntFixture.groovy @@ -10,22 +10,37 @@ package org.elasticsearch.gradle.internal.test import org.elasticsearch.gradle.OS + import org.elasticsearch.gradle.internal.AntFixtureStop import org.elasticsearch.gradle.internal.AntTask +import org.elasticsearch.gradle.testclusters.TestClusterInfo +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry import org.gradle.api.GradleException +import org.gradle.api.file.ProjectLayout +import org.gradle.api.provider.Property +import org.gradle.api.provider.Provider +import org.gradle.api.provider.ProviderFactory +import org.gradle.api.provider.ValueSource +import org.gradle.api.provider.ValueSourceParameters +import org.gradle.api.tasks.Input import org.gradle.api.tasks.Internal import org.gradle.api.tasks.TaskProvider +import javax.inject.Inject + /** * A fixture for integration tests which runs in a separate process launched by Ant. */ -class AntFixture extends AntTask implements Fixture { +class AntFixture extends AntTask { /** The path to the executable that starts the fixture. */ @Internal String executable private final List arguments = new ArrayList<>() + private ProjectLayout projectLayout + private final ProviderFactory providerFactory void args(Object... 
args) { arguments.addAll(args) @@ -69,19 +84,14 @@ class AntFixture extends AntTask implements Fixture { return tmpFile.exists() } - private final TaskProvider stopTask - - AntFixture() { - stopTask = createStopTask() + @Inject + AntFixture(ProjectLayout projectLayout, ProviderFactory providerFactory) { + this.providerFactory = providerFactory + this.projectLayout = projectLayout; + TaskProvider stopTask = createStopTask() finalizedBy(stopTask) } - @Override - @Internal - TaskProvider getStopTask() { - return stopTask - } - @Override protected void runAnt(AntBuilder ant) { // reset everything @@ -231,7 +241,7 @@ class AntFixture extends AntTask implements Fixture { */ @Internal protected File getBaseDir() { - return new File(project.buildDir, "fixtures/${name}") + return new File(projectLayout.getBuildDirectory().getAsFile().get(), "fixtures/${name}") } /** Returns the working directory for the process. Defaults to "cwd" inside baseDir. */ @@ -242,7 +252,7 @@ class AntFixture extends AntTask implements Fixture { /** Returns the file the process writes its pid to. Defaults to "pid" inside baseDir. */ @Internal - protected File getPidFile() { + File getPidFile() { return new File(baseDir, 'pid') } @@ -264,6 +274,12 @@ class AntFixture extends AntTask implements Fixture { return portsFile.readLines("UTF-8").get(0) } + @Internal + Provider getAddressAndPortProvider() { + File thePortFile = portsFile + return providerFactory.provider(() -> thePortFile.readLines("UTF-8").get(0)) + } + /** Returns a file that wraps around the actual command when {@code spawn == true}. */ @Internal protected File getWrapperScript() { @@ -281,4 +297,22 @@ class AntFixture extends AntTask implements Fixture { protected File getRunLog() { return new File(cwd, 'run.log') } + + @Internal + Provider getAddressAndPortSource() { + return providerFactory.of(AntFixtureValueSource.class, spec -> { + spec.getParameters().getPortFile().set(portsFile); + }); + } + + static abstract class AntFixtureValueSource implements ValueSource { + @Override + String obtain() { + return getParameters().getPortFile().map { it.readLines("UTF-8").get(0) }.get() + } + + interface Parameters extends ValueSourceParameters { + Property getPortFile(); + } + } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java index c70d421939d6d..4396a18c205c1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/Jdk.java @@ -11,6 +11,7 @@ import org.gradle.api.Buildable; import org.gradle.api.artifacts.Configuration; +import org.gradle.api.file.FileCollection; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskDependency; @@ -33,13 +34,14 @@ public class Jdk implements Buildable, Iterable { private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?"); private final String name; - private final Configuration configuration; + private final FileCollection configuration; private final Property vendor; private final Property version; private final Property platform; private final Property architecture; private final Property distributionVersion; + private final String configurationName; private String baseVersion; private String major; private String build; @@ -47,6 +49,7 @@ public class Jdk implements Buildable, Iterable 
{ Jdk(String name, Configuration configuration, ObjectFactory objectFactory) { this.name = name; + this.configurationName = configuration.getName(); this.configuration = configuration; this.vendor = objectFactory.property(String.class); this.version = objectFactory.property(String.class); @@ -137,7 +140,7 @@ public String getPath() { } public String getConfigurationName() { - return configuration.getName(); + return configurationName; } @Override diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index 5b195cad3388f..3c278128e43f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -21,8 +21,6 @@ import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import java.util.Arrays; - /** * @deprecated We wanna get rid from this and custom jdk downloads via this plugin and * make leverage the gradle toolchain resolver capabilities. @@ -38,8 +36,8 @@ public class JdkDownloadPlugin implements Plugin { private static final String REPO_NAME_PREFIX = "jdk_repo_"; private static final String EXTENSION_NAME = "jdks"; - public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.+).jdk"; - public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*linux_aarch64"; + public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.*).jdk"; + public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*_aarch64"; @Override public void apply(Project project) { @@ -66,7 +64,8 @@ public void apply(Project project) { .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); - parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); + // parameters.setAsFiletreeOutput(true); + // parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); }); }); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java index 61dea47eb15c1..ca669276123b3 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/compat/compat/AbstractYamlRestCompatTestPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.Dependency; import org.gradle.api.file.Directory; +import org.gradle.api.file.FileCollection; import org.gradle.api.file.ProjectLayout; import org.gradle.api.file.RelativePath; import org.gradle.api.internal.file.FileOperations; @@ -244,10 +245,11 @@ public void apply(Project project) { yamlRestCompatTestTask.configure(testTask -> { testTask.systemProperty("tests.restCompat", true); // Use test runner and classpath from "normal" yaml source set + FileCollection outputFileCollection = yamlCompatTestSourceSet.getOutput(); testTask.setTestClassesDirs( yamlTestSourceSet.getOutput().getClassesDirs().plus(yamlCompatTestSourceSet.getOutput().getClassesDirs()) ); - testTask.onlyIf("Compatibility tests are available", t -> 
yamlCompatTestSourceSet.getOutput().isEmpty() == false); + testTask.onlyIf("Compatibility tests are available", t -> outputFileCollection.isEmpty() == false); testTask.setClasspath( yamlCompatTestSourceSet.getRuntimeClasspath() // remove the "normal" api and tests diff --git a/build-tools-internal/src/main/resources/checkstyle.xml b/build-tools-internal/src/main/resources/checkstyle.xml index daedc2ac3c629..9ed31d993909e 100644 --- a/build-tools-internal/src/main/resources/checkstyle.xml +++ b/build-tools-internal/src/main/resources/checkstyle.xml @@ -57,7 +57,7 @@ unfair. --> - + diff --git a/build-tools-internal/src/main/resources/fips_java.policy b/build-tools-internal/src/main/resources/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/build-tools-internal/src/main/resources/fips_java.policy +++ b/build-tools-internal/src/main/resources/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index a9da7995c2b36..68b97050ea012 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -167,3 +167,5 @@ org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#(java.lang.Str @defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug java.lang.Thread#(java.lang.Runnable) java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) + +org.elasticsearch.common.bytes.BytesReference#copyBytes(org.elasticsearch.common.bytes.BytesReference) @ This method is a subject for removal. Copying bytes is prone to performance regressions and unnecessary allocations. 
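Several of the build-tools changes in this diff replace execution-time access to Project, extension, or task state with values captured through Gradle's Provider APIs so the tasks remain configuration-cache friendly. The sketch below is illustrative only and is not part of this change set; the class name PortFileValueSource, the Params interface, and the getPortFile parameter are assumptions chosen to mirror the shape of the AntFixtureValueSource added above and the TestClusterValueSource introduced further down in this diff. It shows the general ValueSource pattern: the value is derived solely from declared parameters, so Gradle can recompute it without holding task or project references.

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;

import org.gradle.api.provider.Property;
import org.gradle.api.provider.ValueSource;
import org.gradle.api.provider.ValueSourceParameters;

public abstract class PortFileValueSource implements ValueSource<String, PortFileValueSource.Params> {

    // Hypothetical parameter type: the fixture's ports file is the only input.
    public interface Params extends ValueSourceParameters {
        Property<File> getPortFile();
    }

    @Override
    public String obtain() {
        try {
            // Re-read the first line of the ports file each time the provider is queried;
            // no Project, Task, or extension state is captured here.
            return Files.readAllLines(getParameters().getPortFile().get().toPath()).get(0);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}

Code with access to a ProviderFactory could then obtain the address as a Provider<String> with something like providers.of(PortFileValueSource.class, spec -> spec.getParameters().getPortFile().set(portsFile)), which is the same shape used by AntFixture.getAddressAndPortSource() and TestClustersRegistry.getClusterInfo() in this diff.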
diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index ec341ecfd8b79..77393fe16b4c2 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -76,6 +76,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final LinkedHashMap> waitConditions = new LinkedHashMap<>(); private final transient Project project; private final Provider reaper; + private final Provider testClustersRegistryProvider; private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -87,11 +88,14 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private boolean shared = false; + private int claims = 0; + public ElasticsearchCluster( String path, String clusterName, Project project, Provider reaper, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -104,6 +108,7 @@ public ElasticsearchCluster( this.clusterName = clusterName; this.project = project; this.reaper = reaper; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -120,6 +125,7 @@ public ElasticsearchCluster( clusterName + "-0", project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -177,6 +183,7 @@ public void setNumberOfNodes(int numberOfNodes) { clusterName + "-" + i, project, reaper, + testClustersRegistryProvider, fileSystemOperations, archiveOperations, execOperations, @@ -408,6 +415,7 @@ public void setPreserveDataDir(boolean preserveDataDir) { public void freeze() { nodes.forEach(ElasticsearchNode::freeze); configurationFrozen.set(true); + nodes.whenObjectAdded(node -> { throw new IllegalStateException("Cannot add nodes to test cluster after is has been frozen"); }); } private void checkFrozen() { @@ -663,4 +671,11 @@ public String toString() { return "cluster{" + path + ":" + clusterName + "}"; } + int addClaim() { + return ++this.claims; + } + + int removeClaim() { + return --this.claims; + } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index df11733928f0f..90162591cfcef 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -124,6 +124,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final String name; transient private final Project project; private final Provider reaperServiceProvider; + private final Provider testClustersRegistryProvider; + private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; @@ -164,7 +166,6 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final List distributions = new ArrayList<>(); private int currentDistro = 0; private TestDistribution 
testDistribution; - private volatile Process esProcess; private Function nameCustomization = s -> s; private boolean isWorkingDirConfigured = false; private String httpPort = "0"; @@ -179,6 +180,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { String name, Project project, Provider reaperServiceProvider, + Provider testClustersRegistryProvider, FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, ExecOperations execOperations, @@ -191,6 +193,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.name = name; this.project = project; this.reaperServiceProvider = reaperServiceProvider; + this.testClustersRegistryProvider = testClustersRegistryProvider; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; this.execOperations = execOperations; @@ -892,11 +895,13 @@ private void startElasticsearchProcess() { } } LOGGER.info("Running `{}` in `{}` for {} env: {}", command, workingDir, this, environment); + Process esProcess; try { esProcess = processBuilder.start(); } catch (IOException e) { throw new TestClustersException("Failed to start ES process for " + this, e); } + testClustersRegistryProvider.get().storeProcess(id(), esProcess); reaperServiceProvider.get().registerPid(toString(), esProcess.pid()); } @@ -982,6 +987,7 @@ public synchronized void stop(boolean tailLogs) { } catch (IOException e) { throw new UncheckedIOException(e); } + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); if (esProcess == null && tailLogs) { // This is a special case. If start() throws an exception the plugin will still call stop // Another exception here would eat the orriginal. @@ -1574,6 +1580,7 @@ public List getFeatureFlags() { @Override @Internal public boolean isProcessAlive() { + Process esProcess = testClustersRegistryProvider.get().getProcess(id()); requireNonNull(esProcess, "Can't wait for `" + this + "` as it's not started. Does the task have `useCluster` ?"); return esProcess.isAlive(); } @@ -1602,6 +1609,10 @@ public int hashCode() { @Override public String toString() { + return id() + " (" + System.identityHashCode(this) + ")"; + } + + private String id() { return "node{" + path + ":" + name + "}"; } @@ -1702,7 +1713,7 @@ public CharSequence[] getArgs() { } } - private record FeatureFlag(String feature, Version from, Version until) { + public record FeatureFlag(String feature, Version from, Version until) { @Input public String getFeature() { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java new file mode 100644 index 0000000000000..07663de7a9df9 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterInfo.java @@ -0,0 +1,36 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package org.elasticsearch.gradle.testclusters; + +import java.io.File; +import java.util.List; + +public class TestClusterInfo { + private final List allHttpSocketURI; + private final List allTransportPortURI; + private final List auditLogs; + + public TestClusterInfo(List allHttpSocketURI, List allTransportPortURI, List auditLogs) { + this.allHttpSocketURI = allHttpSocketURI; + this.allTransportPortURI = allTransportPortURI; + this.auditLogs = auditLogs; + } + + public List getAllHttpSocketURI() { + return allHttpSocketURI; + } + + public List getAllTransportPortURI() { + return allTransportPortURI; + } + + public List getAuditLogs() { + return auditLogs; + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java new file mode 100644 index 0000000000000..8ecadcdc6d2b1 --- /dev/null +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterValueSource.java @@ -0,0 +1,34 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.gradle.testclusters; + +import org.gradle.api.provider.Property; +import org.gradle.api.provider.ValueSource; +import org.gradle.api.provider.ValueSourceParameters; +import org.jetbrains.annotations.Nullable; + +public abstract class TestClusterValueSource implements ValueSource { + + @Nullable + @Override + public TestClusterInfo obtain() { + String clusterName = getParameters().getClusterName().get(); + String path = getParameters().getPath().get(); + return getParameters().getService().get().getClusterDetails(path, clusterName); + } + + interface Parameters extends ValueSourceParameters { + Property getClusterName(); + + Property getPath(); + + Property getService(); + } +} diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java index f84aa2a0389c2..9e5fc1f09ac9e 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersAware.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.gradle.testclusters; +import org.elasticsearch.gradle.ElasticsearchDistribution; import org.gradle.api.Task; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -34,10 +35,15 @@ default void useCluster(ElasticsearchCluster cluster) { if (cluster.getPath().equals(getProject().getPath()) == false) { throw new TestClustersException("Task " + getPath() + " can't use test cluster from" + " another project " + cluster); } - - cluster.getNodes() - .all(node -> node.getDistributions().forEach(distro -> dependsOn(getProject().provider(() -> distro.maybeFreeze())))); - dependsOn(cluster.getPluginAndModuleConfigurations()); + if (cluster.getName().equals(getName())) { + for (ElasticsearchNode node : cluster.getNodes()) { + for (ElasticsearchDistribution distro : node.getDistributions()) { + 
ElasticsearchDistribution frozenDistro = distro.maybeFreeze(); + dependsOn(frozenDistro); + } + } + dependsOn(cluster.getPluginAndModuleConfigurations()); + } getClusters().add(cluster); } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index 301782d52d1a3..ada31bc11a653 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -26,6 +26,7 @@ import org.gradle.api.invocation.Gradle; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; @@ -106,15 +107,22 @@ public void apply(Project project) { runtimeJavaProvider = providerFactory.provider( () -> System.getenv("RUNTIME_JAVA_HOME") == null ? Jvm.current().getJavaHome() : new File(System.getenv("RUNTIME_JAVA_HOME")) ); + + // register cluster registry as a global build service + Provider testClustersRegistryProvider = project.getGradle() + .getSharedServices() + .registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); + // enable the DSL to describe clusters - NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaperServiceProvider); + NamedDomainObjectContainer container = createTestClustersContainerExtension( + project, + testClustersRegistryProvider, + reaperServiceProvider + ); // provide a task to be able to list defined clusters. createListClustersTask(project, container); - // register cluster registry as a global build service - project.getGradle().getSharedServices().registerIfAbsent(REGISTRY_SERVICE_NAME, TestClustersRegistry.class, noop()); - // register throttle so we only run at most max-workers/2 nodes concurrently Provider testClustersThrottleProvider = project.getGradle() .getSharedServices() @@ -145,6 +153,7 @@ private void configureArtifactTransforms(Project project) { private NamedDomainObjectContainer createTestClustersContainerExtension( Project project, + Provider testClustersRegistryProvider, Provider reaper ) { // Create an extensions that allows describing clusters @@ -155,6 +164,7 @@ private NamedDomainObjectContainer createTestClustersConta name, project, reaper, + testClustersRegistryProvider, getFileSystemOperations(), getArchiveOperations(), getExecOperations(), @@ -199,7 +209,9 @@ public void apply(Project project) { Provider testClusterTasksService = project.getGradle() .getSharedServices() - .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> {}); + .registerIfAbsent(TEST_CLUSTER_TASKS_SERVICE, TaskEventsService.class, spec -> { + spec.getParameters().getRegistry().set(registryProvider); + }); TestClustersRegistry registry = registryProvider.get(); // When we know what tasks will run, we claim the clusters of those task to differentiate between clusters @@ -209,7 +221,7 @@ public void apply(Project project) { configureClaimClustersHook(project.getGradle(), registry); // Before each task, we determine if a cluster needs to be started for that task. 
- configureStartClustersHook(project.getGradle(), registry, testClusterTasksService); + configureStartClustersHook(project.getGradle()); // After each task we determine if there are clusters that are no longer needed. getEventsListenerRegistry().onTaskCompletion(testClusterTasksService); @@ -228,12 +240,7 @@ private static void configureClaimClustersHook(Gradle gradle, TestClustersRegist }); } - private void configureStartClustersHook( - Gradle gradle, - TestClustersRegistry registry, - Provider testClusterTasksService - ) { - testClusterTasksService.get().registry(registry); + private void configureStartClustersHook(Gradle gradle) { gradle.getTaskGraph().whenReady(taskExecutionGraph -> { taskExecutionGraph.getAllTasks() .stream() @@ -249,19 +256,14 @@ private void configureStartClustersHook( } } - static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { + static public abstract class TaskEventsService implements BuildService, OperationCompletionListener { Map tasksMap = new HashMap<>(); - private TestClustersRegistry registryProvider; public void register(TestClustersAware task) { tasksMap.put(task.getPath(), task); } - public void registry(TestClustersRegistry registry) { - this.registryProvider = registry; - } - @Override public void onFinish(FinishEvent finishEvent) { if (finishEvent instanceof TaskFinishEvent taskFinishEvent) { @@ -273,11 +275,18 @@ public void onFinish(FinishEvent finishEvent) { if (task.getDidWork()) { task.getClusters() .forEach( - cluster -> registryProvider.stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) + cluster -> getParameters().getRegistry() + .get() + .stopCluster(cluster, taskFinishEvent.getResult() instanceof TaskFailureResult) ); } } } } + + // Some parameters for the web server + interface Params extends BuildServiceParameters { + Property getRegistry(); + } } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java index 8de0dd67b654c..8d2a9217e7d0c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersRegistry.java @@ -10,6 +10,8 @@ import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; +import org.gradle.api.provider.Provider; +import org.gradle.api.provider.ProviderFactory; import org.gradle.api.services.BuildService; import org.gradle.api.services.BuildServiceParameters; @@ -17,20 +19,23 @@ import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import javax.inject.Inject; public abstract class TestClustersRegistry implements BuildService { private static final Logger logger = Logging.getLogger(TestClustersRegistry.class); private static final String TESTCLUSTERS_INSPECT_FAILURE = "testclusters.inspect.failure"; private final Boolean allowClusterToSurvive = Boolean.valueOf(System.getProperty(TESTCLUSTERS_INSPECT_FAILURE, "false")); - private final Map claimsInventory = new HashMap<>(); - private final Set runningClusters = new HashSet<>(); + private final Map nodeProcesses = new HashMap<>(); + + @Inject + public abstract ProviderFactory getProviderFactory(); public void claimCluster(ElasticsearchCluster cluster) { - cluster.freeze(); - int claim = claimsInventory.getOrDefault(cluster, 0) + 1; - claimsInventory.put(cluster, claim); - 
if (claim > 1) { + int claims = cluster.addClaim(); + if (claims > 1) { cluster.setShared(true); } } @@ -43,6 +48,13 @@ public void maybeStartCluster(ElasticsearchCluster cluster) { cluster.start(); } + public Provider getClusterInfo(String clusterName) { + return getProviderFactory().of(TestClusterValueSource.class, spec -> { + spec.getParameters().getService().set(TestClustersRegistry.this); + spec.getParameters().getClusterName().set(clusterName); + }); + } + public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { if (taskFailed) { // If the task fails, and other tasks use this cluster, the other task will likely never be @@ -67,8 +79,7 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { runningClusters.remove(cluster); } } else { - int currentClaims = claimsInventory.getOrDefault(cluster, 0) - 1; - claimsInventory.put(cluster, currentClaims); + int currentClaims = cluster.removeClaim(); if (currentClaims <= 0 && runningClusters.contains(cluster)) { cluster.stop(false); runningClusters.remove(cluster); @@ -76,4 +87,33 @@ public void stopCluster(ElasticsearchCluster cluster, boolean taskFailed) { } } + public TestClusterInfo getClusterDetails(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + return new TestClusterInfo( + cluster.getAllHttpSocketURI(), + cluster.getAllTransportPortURI(), + cluster.getNodes().stream().map(n -> n.getAuditLog()).collect(Collectors.toList()) + ); + } + + public void restart(String path, String clusterName) { + ElasticsearchCluster cluster = runningClusters.stream() + .filter(c -> c.getPath().equals(path)) + .filter(c -> c.getName().equals(clusterName)) + .findFirst() + .orElseThrow(); + cluster.restart(); + } + + public void storeProcess(String id, Process esProcess) { + nodeProcesses.put(id, esProcess); + } + + public Process getProcess(String id) { + return nodeProcesses.get(id); + } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 918980fea616a..7d60137ac86b1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -335,7 +335,6 @@ Closure commonDebConfig(String architecture) { // versions found on oldest supported distro, centos-6 requires('bash', '4.1', GREATER | EQUAL) - requires('lsb-base', '4', GREATER | EQUAL) requires 'libc6' requires 'adduser' diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index edd705b66caaa..1622d8d8aeb40 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -5,8 +5,6 @@ changelog-file-missing-in-native-package # we intentionally copy our copyright file for all deb packages -copyright-file-contains-full-apache-2-license -copyright-not-using-common-license-for-apache2 copyright-without-copyright-notice # we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages @@ -16,37 +14,23 @@ arch-dependent-file-in-usr-share missing-dep-on-jarwrapper # we prefer to not make our config and log files world readable -non-standard-file-perm etc/default/elasticsearch 0660 != 0644 -non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 -non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 -non-standard-file-perm etc/elasticsearch/* -non-standard-dir-perm 
var/lib/elasticsearch/ 2750 != 0755 -non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 - -# this lintian tag is simply wrong; contrary to the explanation, Debian systemd -# does actually look at /usr/lib/systemd/system -systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service +non-standard-file-perm 0660 != 0644 [etc/default/elasticsearch] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/jvm.options.d/] +non-standard-file-perm 0660 != 0644 [etc/elasticsearch/*] +non-standard-dir-perm 2750 != 0755 [var/lib/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [var/log/elasticsearch/] # the package scripts handle systemd directly and don't need to use deb helpers maintainer-script-calls-systemctl # bundled JDK embedded-library -unstripped-binary-or-object usr/share/elasticsearch/jdk/* -extra-license-file usr/share/elasticsearch/jdk/legal/* -hardening-no-pie usr/share/elasticsearch/jdk/bin/* -hardening-no-pie usr/share/elasticsearch/jdk/lib/* +unstripped-binary-or-object [usr/share/elasticsearch/jdk/*] # the system java version that lintian assumes is far behind what elasticsearch uses unknown-java-class-version -# elastic licensed modules contain elastic license -extra-license-file usr/share/elasticsearch/modules/* - -# This dependency appears to have a packaging flaw, and includes a -# generated source file alongside the compiled version -jar-contains-source usr/share/elasticsearch/modules/repository-gcs/api-common*.jar * - # There's no `License` field in Debian control files, but earlier versions # of `lintian` were more permissive. Override this warning so that we can # run `lintian` on different releases of Debian. The format of this override @@ -58,8 +42,27 @@ unknown-field License # indirectly to libc via libdl. This might not be best practice but we # don't build them ourselves and the license precludes us modifying them # to fix this. -library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so +library-not-linked-against-libc [usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*] + + +# Below is the copy of some of the above rules in format for Lintian versions <= 2.104 (Debian 11) +# Override syntax changes between Lintian versions in a non-backwards compatible way, so we handle it with +# duplication and ignoring some issues in the test code. + + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/elasticsearch 0660 != 0644 +non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 +non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 +non-standard-file-perm etc/elasticsearch/* +non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 +non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 -# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so -# which has no dependencies (not even libc) besides the symbols in the base executable. -shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so +# bundled JDK +unstripped-binary-or-object usr/share/elasticsearch/jdk/* + +# Intel MKL libraries are not linked directly to libc. They are linked +# indirectly to libc via libdl. This might not be best practice but we +# don't build them ourselves and the license precludes us modifying them +# to fix this. 
+library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so* diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index ac8ade89c9014..57750f2162a71 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -29,7 +29,7 @@ dependencies { implementation 'org.ow2.asm:asm-tree:9.7' api "org.bouncycastle:bcpg-fips:1.0.7.1" - api "org.bouncycastle:bc-fips:1.0.2.4" + api "org.bouncycastle:bc-fips:1.0.2.5" testImplementation project(":test:framework") testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}" diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml new file mode 100644 index 0000000000000..364f012f94420 --- /dev/null +++ b/docs/changelog/112989.yaml @@ -0,0 +1,5 @@ +pr: 112989 +summary: Upgrade Bouncy Castle FIPS dependencies +area: Security +type: upgrade +issues: [] diff --git a/docs/changelog/114914.yaml b/docs/changelog/114914.yaml new file mode 100644 index 0000000000000..bad13e26682dc --- /dev/null +++ b/docs/changelog/114914.yaml @@ -0,0 +1,5 @@ +pr: 114914 +summary: Adding chunking settings to `IbmWatsonxService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/115616.yaml b/docs/changelog/115616.yaml new file mode 100644 index 0000000000000..4fb4dc18538de --- /dev/null +++ b/docs/changelog/115616.yaml @@ -0,0 +1,6 @@ +pr: 115616 +summary: Fix double lookup failure on ESQL +area: ES|QL +type: bug +issues: + - 111398 diff --git a/docs/changelog/116115.yaml b/docs/changelog/116115.yaml new file mode 100644 index 0000000000000..33e1735c20ca4 --- /dev/null +++ b/docs/changelog/116115.yaml @@ -0,0 +1,5 @@ +pr: 116115 +summary: Allow http unsafe buffers by default +area: Network +type: enhancement +issues: [] diff --git a/docs/changelog/116358.yaml b/docs/changelog/116358.yaml new file mode 100644 index 0000000000000..58b44a1e9bcf5 --- /dev/null +++ b/docs/changelog/116358.yaml @@ -0,0 +1,5 @@ +pr: 116358 +summary: Update Deberta tokenizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/116692.yaml b/docs/changelog/116692.yaml new file mode 100644 index 0000000000000..30f9e62095436 --- /dev/null +++ b/docs/changelog/116692.yaml @@ -0,0 +1,5 @@ +pr: 116692 +summary: Remove all mentions of eis and gateway and deprecate flags that do +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/116765.yaml b/docs/changelog/116765.yaml new file mode 100644 index 0000000000000..ec2357c17acaf --- /dev/null +++ b/docs/changelog/116765.yaml @@ -0,0 +1,5 @@ +pr: 116765 +summary: Metrics for incremental bulk splits +area: Distributed +type: enhancement +issues: [] diff --git a/docs/changelog/116819.yaml b/docs/changelog/116819.yaml new file mode 100644 index 0000000000000..afe06c583fe55 --- /dev/null +++ b/docs/changelog/116819.yaml @@ -0,0 +1,5 @@ +pr: 116819 +summary: ESQL - Add match operator (:) +area: Search +type: feature +issues: [] diff --git a/docs/changelog/116980.yaml b/docs/changelog/116980.yaml new file mode 100644 index 0000000000000..140324fd40b92 --- /dev/null +++ b/docs/changelog/116980.yaml @@ -0,0 +1,6 @@ +pr: 116980 +summary: "ESQL: Fix sorts containing `_source`" +area: ES|QL +type: bug +issues: + - 116659 diff --git a/docs/changelog/117095.yaml b/docs/changelog/117095.yaml new file mode 100644 index 0000000000000..27460924ecb71 --- /dev/null +++ 
b/docs/changelog/117095.yaml @@ -0,0 +1,5 @@ +pr: 117095 +summary: Add version prefix to Inference Service API path +area: Inference +type: enhancement +issues: [] diff --git a/docs/changelog/117148.yaml b/docs/changelog/117148.yaml new file mode 100644 index 0000000000000..92dd69672616a --- /dev/null +++ b/docs/changelog/117148.yaml @@ -0,0 +1,5 @@ +pr: 117148 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/docs/changelog/117153.yaml b/docs/changelog/117153.yaml new file mode 100644 index 0000000000000..f7640c0a7ed6a --- /dev/null +++ b/docs/changelog/117153.yaml @@ -0,0 +1,5 @@ +pr: 117153 +summary: "ESQL: fix the column position in errors" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/117182.yaml b/docs/changelog/117182.yaml new file mode 100644 index 0000000000000..b5398bec1ef30 --- /dev/null +++ b/docs/changelog/117182.yaml @@ -0,0 +1,6 @@ +pr: 117182 +summary: Change synthetic source logic for `constant_keyword` +area: Mapping +type: bug +issues: + - 117083 diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml new file mode 100644 index 0000000000000..e89c2d81506d9 --- /dev/null +++ b/docs/changelog/117189.yaml @@ -0,0 +1,5 @@ +pr: 117189 +summary: Fix deberta tokenizer bug caused by bug in normalizer +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/117243.yaml b/docs/changelog/117243.yaml new file mode 100644 index 0000000000000..f871d476bd0ec --- /dev/null +++ b/docs/changelog/117243.yaml @@ -0,0 +1,5 @@ +pr: 117243 +summary: Bump major version for feature migration system indices +area: Infra/Core +type: upgrade +issues: [] diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml new file mode 100644 index 0000000000000..69474d68a8190 --- /dev/null +++ b/docs/changelog/117316.yaml @@ -0,0 +1,5 @@ +pr: 117316 +summary: Fix validation of SORT by aggregate functions +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc index ca3d100e31e06..3d8bdcca07e2b 100644 --- a/docs/reference/cluster/update-settings.asciidoc +++ b/docs/reference/cluster/update-settings.asciidoc @@ -59,8 +59,8 @@ An example of a transient update: ==== We no longer recommend using transient cluster settings. Use persistent cluster settings instead. If a cluster becomes unstable, transient settings can clear -unexpectedly, resulting in a potentially undesired cluster configuration. See -the <>. +unexpectedly, resulting in a potentially undesired cluster configuration. +// See the <>. ==== // end::transient-settings-warning[] diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json new file mode 100644 index 0000000000000..7a0ace6168b59 --- /dev/null +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -0,0 +1,85 @@ +{ + "comment" : "This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it.", + "type" : "operator", + "name" : "match_operator", + "description" : "Performs a match query on the specified field. Returns true if the provided query matches the row.", + "signatures" : [ + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." 
+ }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + } + ], + "examples" : [ + "FROM books \n| WHERE MATCH(author, \"Faulkner\")\n| KEEP book_no, author \n| SORT book_no \n| LIMIT 5;" + ], + "preview" : true, + "snapshot_only" : false +} diff --git a/docs/reference/esql/functions/kibana/docs/match_operator.md b/docs/reference/esql/functions/kibana/docs/match_operator.md new file mode 100644 index 0000000000000..fda8b24ff76cc --- /dev/null +++ b/docs/reference/esql/functions/kibana/docs/match_operator.md @@ -0,0 +1,14 @@ + + +### MATCH_OPERATOR +Performs a match query on the specified field. Returns true if the provided query matches the row. + +``` +FROM books +| WHERE MATCH(author, "Faulkner") +| KEEP book_no, author +| SORT book_no +| LIMIT 5; +``` diff --git a/docs/reference/esql/functions/operators.asciidoc b/docs/reference/esql/functions/operators.asciidoc index ee344a52687c2..a1a2226335e9b 100644 --- a/docs/reference/esql/functions/operators.asciidoc +++ b/docs/reference/esql/functions/operators.asciidoc @@ -16,6 +16,7 @@ Boolean operators for comparing against one or multiple expressions. * <> * <> * <> +* experimental:[] <> // end::op_list[] include::binary.asciidoc[] @@ -26,3 +27,4 @@ include::cast.asciidoc[] include::in.asciidoc[] include::like.asciidoc[] include::rlike.asciidoc[] +include::search.asciidoc[] diff --git a/docs/reference/esql/functions/search.asciidoc b/docs/reference/esql/functions/search.asciidoc new file mode 100644 index 0000000000000..ae1b003b65abb --- /dev/null +++ b/docs/reference/esql/functions/search.asciidoc @@ -0,0 +1,23 @@ +[discrete] +[[esql-search-operators]] +=== Search operators + +The only search operator is match (`:`). + +preview::["Do not use on production environments. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features."] + +The match operator performs a <> on the specified field. Returns true if the provided query matches the row. 
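Editor's note: the generated example above uses the function form `MATCH(author, "Faulkner")`. As a minimal illustrative sketch of the operator form described in this section (reusing the `books` index and `author`/`book_no` fields from that generated example, and following the `field:query` signature; this is an illustration, not generated reference output):

[source,esql]
----
FROM books
| WHERE author:"Faulkner"   // operator form of the match query; MATCH(author, "Faulkner") expresses the same condition
| KEEP book_no, author
| SORT book_no
| LIMIT 5
----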
+ +[.text-center] +image::esql/functions/signature/match_operator.svg[Embedded,opts=inline] + +include::types/match.asciidoc[] + +[source.merge.styled,esql] +---- +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/match-operator.csv-spec[tag=match-with-field-result] +|=== diff --git a/docs/reference/esql/functions/signature/match_operator.svg b/docs/reference/esql/functions/signature/match_operator.svg new file mode 100644 index 0000000000000..70cea841622eb --- /dev/null +++ b/docs/reference/esql/functions/signature/match_operator.svg @@ -0,0 +1 @@ +field:query \ No newline at end of file diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc new file mode 100644 index 0000000000000..7523b29c62b1d --- /dev/null +++ b/docs/reference/esql/functions/types/match_operator.asciidoc @@ -0,0 +1,12 @@ +// This is generated by ESQL's AbstractFunctionTestCase. Do no edit it. See ../README.md for how to regenerate it. + +*Supported types* + +[%header.monospaced.styled,format=dsv,separator=|] +|=== +field | query | result +keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean +text | text | boolean +|=== diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index 0c479c1f62b76..3ed296fb6db24 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -1,16 +1,18 @@ [discrete] [[esql-stats-by]] -=== `STATS ... BY` +=== `STATS` -The `STATS ... BY` processing command groups rows according to a common value +The `STATS` processing command groups rows according to a common value and calculates one or more aggregated values over the grouped rows. **Syntax** [source,esql] ---- -STATS [column1 =] expression1[, ..., [columnN =] expressionN] -[BY grouping_expression1[, ..., grouping_expressionN]] +STATS [column1 =] expression1 [WHERE boolean_expression1][, + ..., + [columnN =] expressionN [WHERE boolean_expressionN]] + [BY grouping_expression1[, ..., grouping_expressionN]] ---- *Parameters* @@ -28,14 +30,18 @@ An expression that computes an aggregated value. An expression that outputs the values to group by. If its name coincides with one of the computed columns, that column will be ignored. +`boolean_expressionX`:: +The condition that must be met for a row to be included in the evaluation of `expressionX`. + NOTE: Individual `null` values are skipped when computing aggregations. *Description* -The `STATS ... BY` processing command groups rows according to a common value -and calculate one or more aggregated values over the grouped rows. If `BY` is -omitted, the output table contains exactly one row with the aggregations applied -over the entire dataset. +The `STATS` processing command groups rows according to a common value +and calculates one or more aggregated values over the grouped rows. For the +calculation of each aggregated value, the rows in a group can be filtered with +`WHERE`. If `BY` is omitted, the output table contains exactly one row with +the aggregations applied over the entire dataset. 
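Editor's note: a short sketch of the per-aggregation `WHERE` filter introduced in the syntax above. The `employees` index and the `salary`, `still_hired`, and `gender` fields are assumed sample data for illustration only, not taken from this diff:

[source,esql]
----
FROM employees
| STATS avg_hired_salary = AVG(salary) WHERE still_hired == true,  // filtered aggregation
        max_salary       = MAX(salary)                             // unfiltered aggregation
  BY gender
----

If `BY gender` is dropped, the same query returns a single row, with each `WHERE` still applied only to its own aggregation.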
The following <> are supported: @@ -90,6 +96,29 @@ include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues] include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result] |=== +To filter the rows that go into an aggregation, use the `WHERE` clause: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFiltering] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFiltering-result] +|=== + +The aggregations can be mixed, with and without a filter and grouping is +optional as well: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup-result] +|=== + [[esql-stats-mv-group]] If the grouping key is multivalued then the input row is in all groups: @@ -109,7 +138,7 @@ It's also possible to group by multiple values: include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues] ---- -If the all grouping keys are multivalued then the input row is in all groups: +If all the grouping keys are multivalued then the input row is in all groups: [source.merge.styled,esql] ---- @@ -121,7 +150,7 @@ include::{esql-specs}/stats.csv-spec[tag=multi-mv-group-result] |=== Both the aggregating functions and the grouping expressions accept other -functions. This is useful for using `STATS...BY` on multivalue columns. +functions. This is useful for using `STATS` on multivalue columns. For example, to calculate the average salary change, you can use `MV_AVG` to first average the multiple values per employee, and use the result with the `AVG` function: diff --git a/docs/reference/migration/migrate_8_18.asciidoc b/docs/reference/migration/migrate_8_18.asciidoc new file mode 100644 index 0000000000000..c989ff9f85b6d --- /dev/null +++ b/docs/reference/migration/migrate_8_18.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.18]] +== Migrating to 8.18 +++++ +8.18 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.18. + +See also <> and <>. + +coming::[8.18.0] + + +[discrete] +[[breaking-changes-8.18]] +=== Breaking changes + +There are no breaking changes in {es} 8.18. + diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 6569647fd993e..5048220966bba 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[migrating-9.0]] == Migrating to 9.0 ++++ @@ -23,204 +20,229 @@ The following changes in {es} 9.0 might affect your applications and prevent them from operating normally. Before upgrading to 9.0, review these changes and take the described steps to mitigate the impact. -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[set_lenient_to_true_by_default_when_using_updateable_synonyms]] -// .Set lenient to true by default when using updateable synonyms -// [%collapsible] -// ==== -// *Details* + -// When a `synonym` or `synonym_graph` token filter is configured with `updateable: true`, the default `lenient` -// value will now be `true`. 
-// -// *Impact* + -// `synonym` or `synonym_graph` token filters configured with `updateable: true` will ignore invalid synonyms by -// default. This prevents shard initialization errors on invalid synonyms. -// ==== -// -// [discrete] -// [[breaking_90_mapping_changes]] -// ==== Mapping changes -// -// [[jdk_locale_database_change]] -// .JDK locale database change -// [%collapsible] -// ==== -// *Details* + -// {es} 8.16 changes the version of the JDK that is included from version 22 to version 23. This changes the locale database that is used by Elasticsearch from the COMPAT database to the CLDR database. This change can cause significant differences to the textual date formats accepted by Elasticsearch, and to calculated week-dates. -// -// If you run {es} 8.16 on JDK version 22 or below, it will use the COMPAT locale database to match the behavior of 8.15. However, starting with {es} 9.0, {es} will use the CLDR database regardless of JDK version it is run on. -// -// *Impact* + -// This affects you if you use custom date formats using textual or week-date field specifiers. If you use date fields or calculated week-dates that change between the COMPAT and CLDR databases, then this change will cause Elasticsearch to reject previously valid date fields as invalid data. You might need to modify your ingest or output integration code to account for the differences between these two JDK versions. -// -// Starting in version 8.15.2, Elasticsearch will log deprecation warnings if you are using date format specifiers that might change on upgrading to JDK 23. These warnings are visible in Kibana. -// -// For detailed guidance, refer to <> and the https://ela.st/jdk-23-locales[Elastic blog]. -// ==== -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[snowball_stemmers_have_been_upgraded]] -// .Snowball stemmers have been upgraded -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users using Snowball stemmers that are experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously index data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed. -// ==== -// -// [[german2_snowball_stemmer_an_alias_for_german_stemmer]] -// .The "german2" snowball stemmer is now an alias for the "german" stemmer -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may results in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü" etc...) -// -// *Impact* + -// Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible. -// ==== -// -// [[persian_analyzer_has_stemmer_by_default]] -// .The 'persian' analyzer has stemmer by default -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. 
Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. -// -// *Impact* + -// Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. -// ==== -// -// [[korean_dictionary_for_nori_has_been_updated]] -// .The Korean dictionary for Nori has been updated -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. -// ==== -// -// [discrete] -// [[breaking_90_cluster_and_node_setting_changes]] -// ==== Cluster and node setting changes -// -// [[remove_unsupported_legacy_value_for_discovery_type]] -// .Remove unsupported legacy value for `discovery.type` -// [%collapsible] -// ==== -// *Details* + -// Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. -// -// *Impact* + -// Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. -// ==== -// -// [discrete] -// [[breaking_90_es_ql_changes]] -// ==== ES|QL changes -// -// [[esql_entirely_remove_meta_functions]] -// .ESQL: Entirely remove META FUNCTIONS -// [%collapsible] -// ==== -// *Details* + -// Removes an undocumented syntax from ESQL: META FUNCTION. This was never -// reliable or really useful. Consult the documentation instead. -// -// *Impact* + -// Removes an undocumented syntax from ESQL: META FUNCTION -// ==== -// -// [discrete] -// [[breaking_90_rest_api_changes]] -// ==== REST API changes -// -// [[remove_cluster_state_from_cluster_reroute_response]] -// .Remove cluster state from `/_cluster/reroute` response -// [%collapsible] -// ==== -// *Details* + -// The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version. -// -// *Impact* + -// Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API. -// ==== -// -// [[remove_deprecated_local_attribute_from_alias_apis]] -// .Remove deprecated local attribute from alias APIs -// [%collapsible] -// ==== -// *Details* + -// The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. -// -// *Impact* + -// Cease usage of the `?local` query parameter when calling the listed APIs. 
-// ==== -// -// [[reworking_rrf_retriever_to_be_evaluated_during_rewrite_phase]] -// .Reworking RRF retriever to be evaluated during rewrite phase -// [%collapsible] -// ==== -// *Details* + -// In this release (8.16), we have introduced major changes to the retrievers framework -// and how they can be evaluated, focusing mainly on compound retrievers -// like `rrf` and `text_similarity_reranker`, which allowed us to support full -// composability (i.e. any retriever can be nested under any compound retriever), -// as well as supporting additional search features like collapsing, explaining, -// aggregations, and highlighting. -// -// To ensure consistency, and given that this rework is not available until 8.16, -// `rrf` and `text_similarity_reranker` retriever queries would now -// throw an exception in a mixed cluster scenario, where there are nodes -// both in current or later (i.e. >= 8.16) and previous ( <= 8.15) versions. -// -// As part of the rework, we have also removed the `_rank` property from -// the responses of an `rrf` retriever. -// -// *Impact* + -// - Users will not be able to use the `rrf` and `text_similarity_reranker` retrievers in a mixed cluster scenario -// with previous releases (i.e. prior to 8.16), and the request will throw an `IllegalArgumentException`. -// - `_rank` has now been removed from the output of the `rrf` retrievers so trying to directly parse the field -// will throw an exception -// ==== -// -// [[update_data_stream_lifecycle_telemetry_to_track_global_retention]] -// .Update data stream lifecycle telemetry to track global retention -// [%collapsible] -// ==== -// *Details* + -// In this release we introduced global retention settings that fulfil the following criteria: -// -// - a data stream managed by the data stream lifecycle, -// - a data stream that is not an internal data stream. -// -// As a result, we defined different types of retention: -// -// - **data retention**: the retention configured on data stream level by the data stream user or owner -// - **default global retention:** the retention configured by an admin on a cluster level and applied to any -// data stream that doesn't have data retention and fulfils the criteria. -// - **max global retention:** the retention configured by an admin to guard against having long retention periods. -// Any data stream that fulfills the criteria will adhere to the data retention unless it exceeds the max retention, -// in which case the max global retention applies. -// - **effective retention:** the retention that applies on the data stream that fulfill the criteria at a given moment -// in time. It takes into consideration all the retention above and resolves it to the retention that will take effect. -// -// Considering the above changes, having a field named `retention` in the usage API was confusing. For this reason, we -// renamed it to `data_retention` and added telemetry about the other configurations too. -// -// *Impact* + -// Users that use the field `data_lifecycle.retention` should use the `data_lifecycle.data_retention` -// ==== + + +There are no notable breaking changes in {es} 9.0. +But there are some less critical breaking changes. + +[discrete] +[[breaking_90_analysis_changes]] +==== Analysis changes + +[[snowball_stemmers_have_been_upgraded]] +.Snowball stemmers have been upgraded +[%collapsible] +==== +*Details* + +Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. 
Users using Snowball stemmers that are experiencing changes in search behaviour on existing data are advised to reindex. + +*Impact* + +The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously indexed data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed. +==== + +[[german2_snowball_stemmer_an_alias_for_german_stemmer]] +.The "german2" snowball stemmer is now an alias for the "german" stemmer +[%collapsible] +==== +*Details* + +Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may result in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü" etc...) + +*Impact* + +Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible. +==== + +[[persian_analyzer_has_stemmer_by_default]] +.The 'persian' analyzer has stemmer by default +[%collapsible] +==== +*Details* + +Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. + +*Impact* + +Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. +==== + +[[korean_dictionary_for_nori_has_been_updated]] +.The Korean dictionary for Nori has been updated +[%collapsible] +==== +*Details* + +Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. + +*Impact* + +The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. +==== + +[discrete] +[[breaking_90_cluster_and_node_setting_changes]] +==== Cluster and node setting changes + +[[minimum_shard_balancer_threshold_1_0]] +.Minimum shard balancer threshold is now 1.0 +[%collapsible] +==== +*Details* + +Earlier versions of {es} accepted any non-negative value for `cluster.routing.allocation.balance.threshold`, but values smaller than `1.0` do not make sense and have been ignored since version 8.6.1. From 9.0.0 these nonsensical values are now forbidden. + +*Impact* + +Do not set `cluster.routing.allocation.balance.threshold` to a value less than `1.0`. +==== + +[[remove_cluster_routing_allocation_disk_watermark_enable_for_single_data_node_setting]] +.Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting +[%collapsible] +==== +*Details* + +Prior to 7.8, whenever a cluster had only a single data node, the watermarks would not be respected.
In order to change this in 7.8+ in a backwards compatible way, we introduced the `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` node setting. The setting was deprecated in 7.14 and was made to accept only true in 8.0. + +*Impact* + +No known end user impact. +==== + +[[remove_deprecated_xpack_searchable_snapshot_allocate_on_rolling_restart_setting]] +.Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting +[%collapsible] +==== +*Details* + +The `xpack.searchable.snapshot.allocate_on_rolling_restart` setting was created as an escape-hatch just in case relying on the `cluster.routing.allocation.enable=primaries` setting for allocating searchable snapshots during rolling restarts had some unintended side-effects. It has been deprecated since 8.2.0. + +*Impact* + +Remove `xpack.searchable.snapshot.allocate_on_rolling_restart` from your settings if present. +==== + +[[remove_unsupported_legacy_value_for_discovery_type]] +.Remove unsupported legacy value for `discovery.type` +[%collapsible] +==== +*Details* + +Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. + +*Impact* + +Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. +==== + +[discrete] +[[breaking_90_ingest_changes]] +==== Ingest changes + +[[remove_ecs_option_on_user_agent_processor]] +.Remove `ecs` option on `user_agent` processor +[%collapsible] +==== +*Details* + +The `user_agent` ingest processor no longer accepts the `ecs` option. (It was previously deprecated and ignored.) + +*Impact* + +Users should stop using the `ecs` option when creating instances of the `user_agent` ingest processor. The option will be removed from existing processors stored in the cluster state on upgrade. +==== + +[[remove_ignored_fallback_option_on_geoip_processor]] +.Remove ignored fallback option on GeoIP processor +[%collapsible] +==== +*Details* + +The option fallback_to_default_databases on the geoip ingest processor has been removed. (It was deprecated and ignored since 8.0.0.) + +*Impact* + +Customers should remove the noop fallback_to_default_databases option on any geoip ingest processors. +==== + +[discrete] +[[breaking_90_mapping_changes]] +==== Mapping changes + +[[remove_support_for_type_fields_copy_to_boost_in_metadata_field_definition]] +.Remove support for type, fields, copy_to and boost in metadata field definition +[%collapsible] +==== +*Details* + +The type, fields, copy_to and boost parameters are no longer supported in metadata field definition. + +*Impact* + +Users providing type, fields, copy_to or boost as part of metadata field definition should remove them from their mappings. +==== + +[discrete] +[[breaking_90_rest_api_changes]] +==== REST API changes + +[[apply_more_strict_parsing_of_actions_in_bulk_api]] +.Apply more strict parsing of actions in bulk API +[%collapsible] +==== +*Details* + +Previously, the following classes of malformed input were deprecated but not rejected in the action lines of a bulk request: missing closing brace; additional keys after the action (which were ignored); additional data after the closing brace (which was ignored). They will now be considered errors and rejected. + +*Impact* + +Users must provide well-formed input when using the bulk API.
(They can request REST API compatibility with v8 to get the previous behaviour back as an interim measure.) +==== + +[[error_json_structure_has_changed_when_detailed_errors_are_disabled]] +.Error JSON structure has changed when detailed errors are disabled +[%collapsible] +==== +*Details* + +This change modifies the JSON format of error messages returned to REST clients +when detailed messages are turned off. +Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set, +just consisted of a single `"error"` text field with some basic information. +Setting `http.detailed_errors.enabled: true` (the default) changed this field +to an object with more detailed information. +With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always +be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed +errors are enabled. +To use the previous structure for non-detailed errors, use the v8 REST API. + +*Impact* + +If you have set `http.detailed_errors.enabled: false` (the default is `true`) +the structure of JSON when any exceptions occur now matches the structure when +detailed errors are enabled. +To use the previous structure for non-detailed errors, use the v8 REST API. +==== + +[[remove_cluster_state_from_cluster_reroute_response]] +.Remove cluster state from `/_cluster/reroute` response +[%collapsible] +==== +*Details* + +The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version. + +*Impact* + +Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API. +==== + +[[remove_deprecated_local_attribute_from_alias_apis]] +.Remove deprecated local attribute from alias APIs +[%collapsible] +==== +*Details* + +The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. + +*Impact* + +Cease usage of the `?local` query parameter when calling the listed APIs. +==== + +[[remove_legacy_params_from_range_query]] +.Remove legacy params from range query +[%collapsible] +==== +*Details* + +The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + +*Impact* + +Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. +==== + +[[remove_support_for_deprecated_force_source_highlighting_parameter]] +.Remove support for deprecated `force_source` highlighting parameter +[%collapsible] +==== +*Details* + +The deprecated highlighting `force_source` parameter is no longer supported. + +*Impact* + +Users should remove usages of the `force_source` parameter from their search requests. +==== [discrete] @@ -235,85 +257,45 @@ after upgrading to 9.0. To find out if you are using any deprecated functionality, enable <>. 
-// -// [discrete] -// [[deprecations_90_analysis]] -// ==== Analysis deprecations -// -// [[deprecate_dutch_kp_lovins_stemmer_as_they_are_removed_in_lucene_10]] -// .Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 -// [%collapsible] -// ==== -// *Details* + -// kp, dutch_kp, dutchKp and lovins stemmers are deprecated and will be removed. -// -// *Impact* + -// These stemmers will be removed and will be no longer supported. -// ==== -// -// [[deprecate_edge_ngram_side_parameter]] -// .deprecate `edge_ngram` side parameter -// [%collapsible] -// ==== -// *Details* + -// edge_ngram will no longer accept the side parameter. -// -// *Impact* + -// Users will need to update any usage of edge_ngram token filter that utilizes `side`. If the `back` value was used, they can achieve the same behavior by using the `reverse` token filter. -// ==== -// -// [discrete] -// [[deprecations_90_crud]] -// ==== CRUD deprecations -// -// [[deprecate_dot_prefixed_indices_composable_template_index_patterns]] -// .Deprecate dot-prefixed indices and composable template index patterns -// [%collapsible] -// ==== -// *Details* + -// Indices beginning with a dot '.' are reserved for system and internal indices, and should not be used by and end-user. Additionally, composable index templates that contain patterns for dot-prefixed indices should also be avoided, as these patterns are meant for internal use only. In a future Elasticsearch version, creation of these dot-prefixed indices will no longer be allowed. -// -// *Impact* + -// Requests performing an action that would create an index beginning with a dot (indexing a document, manual creation, reindex), or creating an index template with index patterns beginning with a dot, will contain a deprecation header warning about dot-prefixed indices in the response. -// ==== -// -// [discrete] -// [[deprecations_90_rest_api]] -// ==== REST API deprecations -// -// [[adding_deprecation_warnings_for_rrf_using_rank_sub_searches]] -// .Adding deprecation warnings for rrf using rank and `sub_searches` -// [%collapsible] -// ==== -// *Details* + -// Search API parameter `sub_searches` will no longer be a supported and will be removed in future releases. Similarly, `rrf` can only be used through the specified `retriever` and no longer though the `rank` parameter -// -// *Impact* + -// Requests specifying rrf through `rank` and/or `sub_searches` elements will be disallowed in a future version. Users should instead utilize the new `retriever` parameter. -// ==== -// -// [[deprecate_legacy_params_from_range_query]] -// .Deprecate legacy params from range query -// [%collapsible] -// ==== -// *Details* + -// Range query will not longer accept `to`, `from`, `include_lower`, and `include_upper` parameters. -// -// *Impact* + -// Instead use `gt`, `gte`, `lt` and `lte` parameters. -// ==== -// -// [[inference_api_deprecate_elser_service]] -// .[Inference API] Deprecate elser service -// [%collapsible] -// ==== -// *Details* + -// The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead. -// -// *Impact* + -// In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API. 
-// ==== - -// BELOW WAS MANUALLY ADDED TO FIX THE BUILD -include::migrate_9_0/transient-settings-migration-guide.asciidoc[] -//include::migrate_9_0/rest-api-changes.asciidoc[] //see ES-9932 + + +[discrete] +[[deprecations_90_mapping]] +==== Mapping deprecations + +[[deprecate_source_mode_in_mappings]] +.Deprecate `_source.mode` in mappings +[%collapsible] +==== +*Details* + +Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use `index.mapping.source.mode` index setting instead. + +*Impact* + +Use `index.mapping.source.mode` index setting instead. +==== + +[discrete] +[[deprecations_90_rest_api]] +==== REST API deprecations + +[[document_type_deprecated_on_simulate_pipeline_api]] +.Document `_type` deprecated on simulate pipeline API +[%collapsible] +==== +*Details* + +Passing a document with a `_type` property is deprecated in the `/_ingest/pipeline/{id}/_simulate` and `/_ingest/pipeline/_simulate` APIs. + +*Impact* + +Users should already have stopped using mapping types, which were deprecated in {es} 7. This deprecation warning will fire if they specify mapping types on documents passed to the simulate pipeline API. +==== + +[[inference_api_deprecate_elser_service]] +.[Inference API] Deprecate elser service +[%collapsible] +==== +*Details* + +The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead. + +*Impact* + +In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API. +==== + diff --git a/docs/reference/release-notes/8.18.0.asciidoc b/docs/reference/release-notes/8.18.0.asciidoc new file mode 100644 index 0000000000000..332edfbc23eb7 --- /dev/null +++ b/docs/reference/release-notes/8.18.0.asciidoc @@ -0,0 +1,8 @@ +[[release-notes-8.18.0]] +== {es} version 8.18.0 + +coming[8.18.0] + +Also see <>. + + diff --git a/docs/reference/release-notes/9.0.0.asciidoc b/docs/reference/release-notes/9.0.0.asciidoc index af26fd57385e3..93e5a30cb82f7 100644 --- a/docs/reference/release-notes/9.0.0.asciidoc +++ b/docs/reference/release-notes/9.0.0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-notes-9.0.0]] == {es} version 9.0.0 @@ -12,546 +9,289 @@ Also see <>.
[float] === Breaking changes -// Allocation:: -// * Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) -// -// Analysis:: -// * Set lenient to true by default when using updateable synonyms {es-pull}110901[#110901] -// * Snowball stemmers have been upgraded {es-pull}114146[#114146] -// * The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] -// * The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) -// * The Korean dictionary for Nori has been updated {es-pull}114124[#114124] -// -// Cluster Coordination:: -// * Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] -// -// Data streams:: -// * Update data stream lifecycle telemetry to track global retention {es-pull}112451[#112451] -// -// ES|QL:: -// * ESQL: Entirely remove META FUNCTIONS {es-pull}113967[#113967] -// -// Indices APIs:: -// * Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] -// -// Mapping:: -// * JDK locale database change {es-pull}113975[#113975] -// -// Search:: -// * Adding breaking change entry for retrievers {es-pull}115399[#115399] +Allocation:: +* Increase minimum threshold in shard balancer {es-pull}115831[#115831] +* Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting {es-pull}114207[#114207] +* Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) + +Analysis:: +* Snowball stemmers have been upgraded {es-pull}114146[#114146] +* The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] +* The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) +* The Korean dictionary for Nori has been updated {es-pull}114124[#114124] + +Cluster Coordination:: +* Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] + +Highlighting:: +* Remove support for deprecated `force_source` highlighting parameter {es-pull}116943[#116943] + +Indices APIs:: +* Apply more strict parsing of actions in bulk API {es-pull}115923[#115923] +* Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] + +Infra/REST API:: +* Output a consistent format when generating error json {es-pull}90529[#90529] (issue: {es-issue}89387[#89387]) + +Ingest Node:: +* Remove `ecs` option on `user_agent` processor {es-pull}116077[#116077] +* Remove ignored fallback option on GeoIP processor {es-pull}116112[#116112] + +Mapping:: +* Remove support for type, fields, `copy_to` and boost in metadata field definition {es-pull}116944[#116944] + +Search:: +* Remove legacy params from range query {es-pull}116970[#116970] + +Snapshot/Restore:: +* Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting {es-pull}114202[#114202] [[bug-9.0.0]] [float] === Bug fixes -// -// Aggregations:: -// * Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` {es-pull}115181[#115181] -// * Force using the last centroid during merging {es-pull}111644[#111644] (issue: {es-issue}111065[#111065]) -// -// Authentication:: -// * Check for disabling own user in Put User API {es-pull}112262[#112262] (issue: {es-issue}90205[#90205]) -// * Expose cluster-state role mappings in APIs {es-pull}114951[#114951] -// -// Authorization:: -// * Fix DLS & FLS sometimes being enforced when it is disabled {es-pull}111915[#111915] 
(issue: {es-issue}94709[#94709]) -// * Fix DLS using runtime fields and synthetic source {es-pull}112341[#112341] -// -// CRUD:: -// * Don't fail retention lease sync actions due to capacity constraints {es-pull}109414[#109414] (issue: {es-issue}105926[#105926]) -// * Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] -// * Standardize error code when bulk body is invalid {es-pull}114869[#114869] -// -// Cluster Coordination:: -// * Ensure clean thread context in `MasterService` {es-pull}114512[#114512] -// -// Data streams:: -// * Adding support for data streams with a match-all template {es-pull}111311[#111311] (issue: {es-issue}111204[#111204]) -// * Exclude internal data streams from global retention {es-pull}112100[#112100] -// * Fix verbose get data stream API not requiring extra privileges {es-pull}112973[#112973] -// * OTel mappings: avoid metrics to be rejected when attributes are malformed {es-pull}114856[#114856] -// * [otel-data] Add more kubernetes aliases {es-pull}115429[#115429] -// * logs-apm.error-*: define log.level field as keyword {es-pull}112440[#112440] -// -// Distributed:: -// * Handle `InternalSendException` inline for non-forking handlers {es-pull}114375[#114375] -// -// EQL:: -// * Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) -// * Fix validation of TEXT fields with case insensitive comparison {es-pull}111238[#111238] (issue: {es-issue}111235[#111235]) -// -// ES|QL:: -// * ESQL: Add Values aggregation tests, fix `ConstantBytesRefBlock` memory handling {es-pull}111367[#111367] -// * ESQL: Align year diffing to the rest of the units in DATE_DIFF: chronological {es-pull}113103[#113103] (issue: {es-issue}112482[#112482]) -// * ESQL: Disable pushdown of WHERE past STATS {es-pull}115308[#115308] (issue: {es-issue}115281[#115281]) -// * ESQL: Fix CASE when conditions are multivalued {es-pull}112401[#112401] (issue: {es-issue}112359[#112359]) -// * ESQL: Fix Double operations returning infinite {es-pull}111064[#111064] (issue: {es-issue}111026[#111026]) -// * ESQL: Fix `REVERSE` with backspace character {es-pull}115245[#115245] (issues: {es-issue}114372[#114372], {es-issue}115227[#115227], {es-issue}115228[#115228]) -// * ESQL: Fix a bug in `MV_PERCENTILE` {es-pull}112218[#112218] (issues: {es-issue}112193[#112193], {es-issue}112180[#112180], {es-issue}112187[#112187], {es-issue}112188[#112188]) -// * ESQL: Fix filtered grouping on ords {es-pull}115312[#115312] (issue: {es-issue}114897[#114897]) -// * ESQL: Fix grammar changes around per agg filtering {es-pull}114848[#114848] -// * ESQL: Fix serialization during `can_match` {es-pull}111779[#111779] (issues: {es-issue}111701[#111701], {es-issue}111726[#111726]) -// * ESQL: Fix synthetic attribute pruning {es-pull}111413[#111413] (issue: {es-issue}105821[#105821]) -// * ESQL: don't lose the original casting error message {es-pull}111968[#111968] (issue: {es-issue}111967[#111967]) -// * ESQL: fix for missing indices error message {es-pull}111797[#111797] (issue: {es-issue}111712[#111712]) -// * ES|QL: Fix stats by constant expression {es-pull}114899[#114899] -// * ES|QL: Restrict sorting for `_source` and counter field types {es-pull}114638[#114638] (issues: {es-issue}114423[#114423], {es-issue}111976[#111976]) -// * ES|QL: better validation for GROK patterns {es-pull}110574[#110574] (issue: {es-issue}110533[#110533]) -// * ES|QL: better validation for RLIKE patterns {es-pull}112489[#112489] (issue: 
{es-issue}112485[#112485]) -// * ES|QL: better validation of GROK patterns {es-pull}112200[#112200] (issue: {es-issue}112111[#112111]) -// * Fix ST_CENTROID_AGG when no records are aggregated {es-pull}114888[#114888] (issue: {es-issue}106025[#106025]) -// * Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) -// * Spatial search functions support multi-valued fields in compute engine {es-pull}112063[#112063] (issues: {es-issue}112102[#112102], {es-issue}112505[#112505], {es-issue}110830[#110830]) -// * [ES|QL] Check expression resolved before checking its data type in `ImplicitCasting` {es-pull}113314[#113314] (issue: {es-issue}113242[#113242]) -// * [ES|QL] Simplify patterns for subfields {es-pull}111118[#111118] -// * [ES|QL] Simplify syntax of named parameter for identifier and pattern {es-pull}115061[#115061] -// * [ES|QL] Skip validating remote cluster index names in parser {es-pull}114271[#114271] -// * [ES|QL] Use `RangeQuery` and String in `BinaryComparison` on datetime fields {es-pull}110669[#110669] (issue: {es-issue}107900[#107900]) -// * [ES|QL] add tests for stats by constant {es-pull}110593[#110593] (issue: {es-issue}105383[#105383]) -// * [ES|QL] make named parameter for identifier and pattern snapshot {es-pull}114784[#114784] -// * [ES|QL] validate `mv_sort` order {es-pull}110021[#110021] (issue: {es-issue}109910[#109910]) -// -// Geo:: -// * Fix cases of collections with one point {es-pull}111193[#111193] (issue: {es-issue}110982[#110982]) -// -// Health:: -// * Set `replica_unassigned_buffer_time` in constructor {es-pull}112612[#112612] -// -// ILM+SLM:: -// * Make `SnapshotLifecycleStats` immutable so `SnapshotLifecycleMetadata.EMPTY` isn't changed as side-effect {es-pull}111215[#111215] -// -// Indices APIs:: -// * Revert "Add `ResolvedExpression` wrapper" {es-pull}115317[#115317] -// -// Infra/Core:: -// * Fix max file size check to use `getMaxFileSize` {es-pull}113723[#113723] (issue: {es-issue}113705[#113705]) -// * Guard blob store local directory creation with `doPrivileged` {es-pull}115459[#115459] -// * Handle `BigInteger` in xcontent copy {es-pull}111937[#111937] (issue: {es-issue}111812[#111812]) -// * Report JVM stats for all memory pools (97046) {es-pull}115117[#115117] (issue: {es-issue}97046[#97046]) -// * `ByteArrayStreamInput:` Return -1 when there are no more bytes to read {es-pull}112214[#112214] -// -// Infra/Logging:: -// * Only emit product origin in deprecation log if present {es-pull}111683[#111683] (issue: {es-issue}81757[#81757]) -// -// Infra/Metrics:: -// * Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] -// -// Infra/REST API:: -// * Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. 
{es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) -// -// Infra/Settings:: -// * GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml {es-pull}110816[#110816] (issue: {es-issue}110815[#110815]) -// -// Ingest Node:: -// * Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) -// * Fix IPinfo geolocation schema {es-pull}115147[#115147] -// * Fix `getDatabaseType` for unusual MMDBs {es-pull}112888[#112888] -// * Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] -// -// License:: -// * Fix Start Trial API output acknowledgement header for features {es-pull}111740[#111740] (issue: {es-issue}111739[#111739]) -// * Fix `TokenService` always appearing used in Feature Usage {es-pull}112263[#112263] (issue: {es-issue}61956[#61956]) -// -// Logs:: -// * Do not expand dots when storing objects in ignored source {es-pull}113910[#113910] -// * Fix `ignore_above` handling in synthetic source when index level setting is used {es-pull}113570[#113570] (issue: {es-issue}113538[#113538]) -// * Fix synthetic source for flattened field when used with `ignore_above` {es-pull}113499[#113499] (issue: {es-issue}112044[#112044]) -// -// Machine Learning:: -// * Avoid `ModelAssignment` deadlock {es-pull}109684[#109684] -// * Fix NPE in Get Deployment Stats {es-pull}115404[#115404] -// * Fix bug in ML serverless autoscaling which prevented trained model updates from triggering a scale up {es-pull}110734[#110734] -// * Ignore unrecognized openai sse fields {es-pull}114715[#114715] -// * Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) -// * Prevent NPE if model assignment is removed while waiting to start {es-pull}115430[#115430] -// * Send mid-stream errors to users {es-pull}114549[#114549] -// * Temporarily return both `modelId` and `inferenceId` for GET /_inference until we migrate clients to only `inferenceId` {es-pull}111490[#111490] -// * Warn for model load failures if they have a status code <500 {es-pull}113280[#113280] -// * [Inference API] Remove unused Cohere rerank service settings fields in a BWC way {es-pull}110427[#110427] -// * [ML] Create Inference API will no longer return model_id and now only return inference_id {es-pull}112508[#112508] -// -// Mapping:: -// * Fix `MapperBuilderContext#isDataStream` when used in dynamic mappers {es-pull}110554[#110554] -// * Fix synthetic source field names for multi-fields {es-pull}112850[#112850] -// * Retrieve the source for objects and arrays in a separate parsing phase {es-pull}113027[#113027] (issue: {es-issue}112374[#112374]) -// * Two empty mappings now are created equally {es-pull}107936[#107936] (issue: {es-issue}107031[#107031]) -// -// Ranking:: -// * Fix MLTQuery handling of custom term frequencies {es-pull}110846[#110846] -// * Fix RRF validation for `rank_constant` < 1 {es-pull}112058[#112058] -// * Fix score count validation in reranker response {es-pull}111212[#111212] (issue: {es-issue}111202[#111202]) -// -// Search:: -// * Allow for querries on `_tier` to skip shards in the `can_match` phase {es-pull}114990[#114990] (issue: {es-issue}114910[#114910]) -// * Allow out of range term queries for numeric types {es-pull}112916[#112916] -// * Do not exclude empty arrays or empty objects in source filtering {es-pull}112250[#112250] 
(issue: {es-issue}109668[#109668]) -// * Fix synthetic source handling for `bit` type in `dense_vector` field {es-pull}114407[#114407] (issue: {es-issue}114402[#114402]) -// * Improve DateTime error handling and add some bad date tests {es-pull}112723[#112723] (issue: {es-issue}112190[#112190]) -// * Improve date expression/remote handling in index names {es-pull}112405[#112405] (issue: {es-issue}112243[#112243]) -// * Make "too many clauses" throw IllegalArgumentException to avoid 500s {es-pull}112678[#112678] (issue: {es-issue}112177[#112177]) -// * Make empty string searches be consistent with case (in)sensitivity {es-pull}110833[#110833] -// * Prevent flattening of ordered and unordered interval sources {es-pull}114234[#114234] -// * Remove needless forking to GENERIC in `TransportMultiSearchAction` {es-pull}110796[#110796] -// * Search/Mapping: KnnVectorQueryBuilder support for allowUnmappedFields {es-pull}107047[#107047] (issue: {es-issue}106846[#106846]) -// * Span term query to convert to match no docs when unmapped field is targeted {es-pull}113251[#113251] -// * Speedup `CanMatchPreFilterSearchPhase` constructor {es-pull}110860[#110860] -// * Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) -// -// Security:: -// * Updated the transport CA name in Security Auto-Configuration. {es-pull}106520[#106520] (issue: {es-issue}106455[#106455]) -// -// Snapshot/Restore:: -// * Retry throttled snapshot deletions {es-pull}113237[#113237] -// -// TSDB:: -// * Implement `parseBytesRef` for `TimeSeriesRoutingHashFieldType` {es-pull}113373[#113373] (issue: {es-issue}112399[#112399]) -// -// Task Management:: -// * Improve handling of failure to create persistent task {es-pull}114386[#114386] -// -// Transform:: -// * Allow task canceling of validate API calls {es-pull}110951[#110951] -// * Include reason when no nodes are found {es-pull}112409[#112409] (issue: {es-issue}112404[#112404]) -// -// Vector Search:: -// * Fix dim validation for bit `element_type` {es-pull}114533[#114533] -// * Support semantic_text in object fields {es-pull}114601[#114601] (issue: {es-issue}114401[#114401]) -// -// Watcher:: -// * Truncating watcher history if it is too large {es-pull}111245[#111245] (issue: {es-issue}94745[#94745]) -// * Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) -// -// [[deprecation-9.0.0]] -// [float] -// === Deprecations -// -// Analysis:: -// * Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 {es-pull}113143[#113143] -// * deprecate `edge_ngram` side parameter {es-pull}110829[#110829] -// -// CRUD:: -// * Deprecate dot-prefixed indices and composable template index patterns {es-pull}112571[#112571] -// -// Machine Learning:: -// * [Inference API] Deprecate elser service {es-pull}113216[#113216] -// -// Search:: -// * Adding deprecation warnings for rrf using rank and `sub_searches` {es-pull}114854[#114854] -// * Deprecate legacy params from range query {es-pull}113286[#113286] -// -// [[enhancement-9.0.0]] -// [float] -// === Enhancements -// -// Aggregations:: -// * Account for `DelayedBucket` before reduction {es-pull}113013[#113013] -// * Add protection for OOM during aggregations partial reduction {es-pull}110520[#110520] -// * Deduplicate `BucketOrder` when deserializing {es-pull}112707[#112707] -// * Lower the memory footprint when creating `DelayedBucket` {es-pull}112519[#112519] -// * Reduce heap usage for 
`AggregatorsReducer` {es-pull}112874[#112874] -// * Remove reduce and `reduceContext` from `DelayedBucket` {es-pull}112547[#112547] -// -// Allocation:: -// * Add link to flood-stage watermark exception message {es-pull}111315[#111315] -// * Always allow rebalancing by default {es-pull}111015[#111015] -// * Only publish desired balance gauges on master {es-pull}115383[#115383] -// -// Application:: -// * [Profiling] add `container.id` field to event index template {es-pull}111969[#111969] -// -// Authorization:: -// * Add manage roles privilege {es-pull}110633[#110633] -// * Add privileges required for CDR misconfiguration features to work on AWS SecurityHub integration {es-pull}112574[#112574] -// * [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] -// -// CRUD:: -// * Suppress merge-on-recovery for older indices {es-pull}113462[#113462] -// -// Codec:: -// * Remove zstd feature flag for index codec best compression {es-pull}112665[#112665] -// -// Data streams:: -// * Add 'verbose' flag retrieving `maximum_timestamp` for get data stream API {es-pull}112303[#112303] -// * Display effective retention in the relevant data stream APIs {es-pull}112019[#112019] -// * Expose global retention settings via data stream lifecycle API {es-pull}112210[#112210] -// * Make ecs@mappings work with OTel attributes {es-pull}111600[#111600] -// -// Distributed:: -// * Add link to Max Shards Per Node exception message {es-pull}110993[#110993] -// * Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] -// -// EQL:: -// * ESQL: Delay construction of warnings {es-pull}114368[#114368] -// -// ES|QL:: -// * Add EXP ES|QL function {es-pull}110879[#110879] -// * Add `CircuitBreaker` to TDigest, Step 3: Connect with ESQL CB {es-pull}113387[#113387] -// * Add `CircuitBreaker` to TDigest, Step 4: Take into account shallow classes size {es-pull}113613[#113613] (issue: {es-issue}113916[#113916]) -// * Collect and display execution metadata for ES|QL cross cluster searches {es-pull}112595[#112595] (issue: {es-issue}112402[#112402]) -// * ESQL: Add support for multivalue fields in Arrow output {es-pull}114774[#114774] -// * ESQL: BUCKET: allow numerical spans as whole numbers {es-pull}111874[#111874] (issues: {es-issue}104646[#104646], {es-issue}109340[#109340], {es-issue}105375[#105375]) -// * ESQL: Have BUCKET generate friendlier intervals {es-pull}111879[#111879] (issue: {es-issue}110916[#110916]) -// * ESQL: Profile more timing information {es-pull}111855[#111855] -// * ESQL: Push down filters even in case of renames in Evals {es-pull}114411[#114411] -// * ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] -// * ESQL: Speed up CASE for some parameters {es-pull}112295[#112295] -// * ESQL: Speed up grouping by bytes {es-pull}114021[#114021] -// * ESQL: Support INLINESTATS grouped on expressions {es-pull}111690[#111690] -// * ESQL: Use less memory in listener {es-pull}114358[#114358] -// * ES|QL: Add support for cached strings in plan serialization {es-pull}112929[#112929] -// * ES|QL: add Telemetry API and track top functions {es-pull}111226[#111226] -// * ES|QL: add metrics for functions {es-pull}114620[#114620] -// * Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function {es-pull}112938[#112938] (issue: {es-issue}109973[#109973]) -// * Siem ea 9521 improve test {es-pull}111552[#111552] -// * Support multi-valued fields in compute engine for ST_DISTANCE 
{es-pull}114836[#114836] (issue: {es-issue}112910[#112910]) -// * [ESQL] Add `SPACE` function {es-pull}112350[#112350] -// * [ESQL] Add finish() elapsed time to aggregation profiling times {es-pull}113172[#113172] (issue: {es-issue}112950[#112950]) -// * [ESQL] Make query wrapped by `SingleValueQuery` cacheable {es-pull}110116[#110116] -// * [ES|QL] Add hypot function {es-pull}114382[#114382] -// * [ES|QL] Cast mixed numeric types to a common numeric type for Coalesce and In at Analyzer {es-pull}111917[#111917] (issue: {es-issue}111486[#111486]) -// * [ES|QL] Combine Disjunctive CIDRMatch {es-pull}111501[#111501] (issue: {es-issue}105143[#105143]) -// * [ES|QL] Create `Range` in `PushFiltersToSource` for qualified pushable filters on the same field {es-pull}111437[#111437] -// * [ES|QL] Name parameter with leading underscore {es-pull}111950[#111950] (issue: {es-issue}111821[#111821]) -// * [ES|QL] Named parameter for field names and field name patterns {es-pull}112905[#112905] -// * [ES|QL] Validate index name in parser {es-pull}112081[#112081] -// * [ES|QL] add reverse function {es-pull}113297[#113297] -// * [ES|QL] explicit cast a string literal to `date_period` and `time_duration` in arithmetic operations {es-pull}109193[#109193] -// -// Experiences:: -// * Integrate IBM watsonx to Inference API for text embeddings {es-pull}111770[#111770] -// -// Geo:: -// * Add support for spatial relationships in point field mapper {es-pull}112126[#112126] -// * Small performance improvement in h3 library {es-pull}113385[#113385] -// * Support docvalues only query in shape field {es-pull}112199[#112199] -// -// Health:: -// * (API) Cluster Health report `unassigned_primary_shards` {es-pull}112024[#112024] -// * Do not treat replica as unassigned if primary recently created and unassigned time is below a threshold {es-pull}112066[#112066] -// * Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] -// -// ILM+SLM:: -// * ILM: Add `total_shards_per_node` setting to searchable snapshot {es-pull}112972[#112972] (issue: {es-issue}112261[#112261]) -// * PUT slm policy should only increase version if actually changed {es-pull}111079[#111079] -// * Preserve Step Info Across ILM Auto Retries {es-pull}113187[#113187] -// * Register SLM run before snapshotting to save stats {es-pull}110216[#110216] -// * SLM interval schedule followup - add back `getFieldName` style getters {es-pull}112123[#112123] -// -// Infra/Circuit Breakers:: -// * Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] -// -// Infra/Core:: -// * Add nanos support to `ZonedDateTime` serialization {es-pull}111689[#111689] (issue: {es-issue}68292[#68292]) -// * Extend logging for dropped warning headers {es-pull}111624[#111624] (issue: {es-issue}90527[#90527]) -// * Give the kibana system user permission to read security entities {es-pull}114363[#114363] -// -// Infra/Metrics:: -// * Add `TaskManager` to `pluginServices` {es-pull}112687[#112687] -// * Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] -// -// Infra/REST API:: -// * Optimize the loop processing of URL decoding {es-pull}110237[#110237] (issue: {es-issue}110235[#110235]) -// -// Infra/Scripting:: -// * Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] -// * Expose `HexFormat` in Painless {es-pull}112412[#112412] -// -// Infra/Settings:: -// * Improve exception message for bad environment variable 
placeholders in settings {es-pull}114552[#114552] (issue: {es-issue}110858[#110858]) -// * Reprocess operator file settings when settings service starts, due to node restart or master node change {es-pull}114295[#114295] -// -// Ingest Node:: -// * Add `size_in_bytes` to enrich cache stats {es-pull}110578[#110578] -// * Add support for templates when validating mappings in the simulate ingest API {es-pull}111161[#111161] -// * Adding `index_template_substitutions` to the simulate ingest API {es-pull}114128[#114128] -// * Adding component template substitutions to the simulate ingest API {es-pull}113276[#113276] -// * Adding mapping validation to the simulate ingest API {es-pull}110606[#110606] -// * Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] -// * Adding support for simulate ingest mapping adddition for indices with mappings that do not come from templates {es-pull}115359[#115359] -// * Adds example plugin for custom ingest processor {es-pull}112282[#112282] (issue: {es-issue}111539[#111539]) -// * Fix unnecessary mustache template evaluation {es-pull}110986[#110986] (issue: {es-issue}110191[#110191]) -// * Listing all available databases in the _ingest/geoip/database API {es-pull}113498[#113498] -// * Make enrich cache based on memory usage {es-pull}111412[#111412] (issue: {es-issue}106081[#106081]) -// * Tag redacted document in ingest metadata {es-pull}113552[#113552] -// * Verify Maxmind database types in the geoip processor {es-pull}114527[#114527] -// -// Logs:: -// * Add validation for synthetic source mode in logs mode indices {es-pull}110677[#110677] -// * Store original source for keywords using a normalizer {es-pull}112151[#112151] -// -// Machine Learning:: -// * Add Completion Inference API for Alibaba Cloud AI Search Model {es-pull}112512[#112512] -// * Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] -// * Add Streaming Inference spec {es-pull}113812[#113812] -// * Add chunking settings configuration to `CohereService,` `AmazonBedrockService,` and `AzureOpenAiService` {es-pull}113897[#113897] -// * Add chunking settings configuration to `ElasticsearchService/ELSER` {es-pull}114429[#114429] -// * Add custom rule parameters to force time shift {es-pull}110974[#110974] -// * Adding chunking settings to `GoogleVertexAiService,` `AzureAiStudioService,` and `AlibabaCloudSearchService` {es-pull}113981[#113981] -// * Adding chunking settings to `MistralService,` `GoogleAiStudioService,` and `HuggingFaceService` {es-pull}113623[#113623] -// * Adds a new Inference API for streaming responses back to the user. 
{es-pull}113158[#113158] -// * Create `StreamingHttpResultPublisher` {es-pull}112026[#112026] -// * Create an ml node inference endpoint referencing an existing model {es-pull}114750[#114750] -// * Default inference endpoint for ELSER {es-pull}113873[#113873] -// * Default inference endpoint for the multilingual-e5-small model {es-pull}114683[#114683] -// * Enable OpenAI Streaming {es-pull}113911[#113911] -// * Filter empty task settings objects from the API response {es-pull}114389[#114389] -// * Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] -// * Migrate Inference to `ChunkedToXContent` {es-pull}111655[#111655] -// * Register Task while Streaming {es-pull}112369[#112369] -// * Server-Sent Events for Inference response {es-pull}112565[#112565] -// * Stream Anthropic Completion {es-pull}114321[#114321] -// * Stream Azure Completion {es-pull}114464[#114464] -// * Stream Bedrock Completion {es-pull}114732[#114732] -// * Stream Cohere Completion {es-pull}114080[#114080] -// * Stream Google Completion {es-pull}114596[#114596] -// * Stream OpenAI Completion {es-pull}112677[#112677] -// * Support sparse embedding models in the elasticsearch inference service {es-pull}112270[#112270] -// * Switch default chunking strategy to sentence {es-pull}114453[#114453] -// * Upgrade to AWS SDK v2 {es-pull}114309[#114309] (issue: {es-issue}110590[#110590]) -// * Use the same chunking configurations for models in the Elasticsearch service {es-pull}111336[#111336] -// * Validate streaming HTTP Response {es-pull}112481[#112481] -// * Wait for allocation on scale up {es-pull}114719[#114719] -// * [Inference API] Add Alibaba Cloud AI Search Model support to Inference API {es-pull}111181[#111181] -// * [Inference API] Add Docs for AlibabaCloud AI Search Support for the Inference API {es-pull}111181[#111181] -// * [Inference API] Introduce Update API to change some aspects of existing inference endpoints {es-pull}114457[#114457] -// * [Inference API] Prevent inference endpoints from being deleted if they are referenced by semantic text {es-pull}110399[#110399] -// * [Inference API] alibabacloud ai search service support chunk infer to support semantic_text field {es-pull}110399[#110399] -// -// Mapping:: -// * Add Field caps support for Semantic Text {es-pull}111809[#111809] -// * Add Lucene segment-level fields stats {es-pull}111123[#111123] -// * Add Search Inference ID To Semantic Text Mapping {es-pull}113051[#113051] -// * Add object param for keeping synthetic source {es-pull}113690[#113690] -// * Add support for multi-value dimensions {es-pull}112645[#112645] (issue: {es-issue}110387[#110387]) -// * Allow dimension fields to have multiple values in standard and logsdb index mode {es-pull}112345[#112345] (issues: {es-issue}112232[#112232], {es-issue}112239[#112239]) -// * Allow fields with dots in sparse vector field mapper {es-pull}111981[#111981] (issue: {es-issue}109118[#109118]) -// * Allow querying `index_mode` {es-pull}110676[#110676] -// * Configure keeping source in `FieldMapper` {es-pull}112706[#112706] -// * Control storing array source with index setting {es-pull}112397[#112397] -// * Introduce mode `subobjects=auto` for objects {es-pull}110524[#110524] -// * Update `semantic_text` field to support indexing numeric and boolean data types {es-pull}111284[#111284] -// * Use ELSER By Default For Semantic Text {es-pull}113563[#113563] -// * Use fallback synthetic source for `copy_to` and doc_values: false cases {es-pull}112294[#112294] 
(issues: {es-issue}110753[#110753], {es-issue}110038[#110038], {es-issue}109546[#109546]) -// -// Network:: -// * Add links to network disconnect troubleshooting {es-pull}112330[#112330] -// -// Ranking:: -// * Add timeout and cancellation check to rescore phase {es-pull}115048[#115048] -// -// Recovery:: -// * Trigger merges after recovery {es-pull}113102[#113102] -// -// Relevance:: -// * Add a query rules tester API call {es-pull}114168[#114168] -// -// Search:: -// * Add initial support for `semantic_text` field type {es-pull}113920[#113920] -// * Add more `dense_vector` details for cluster stats field stats {es-pull}113607[#113607] -// * Add range and regexp Intervals {es-pull}111465[#111465] -// * Adding support for `allow_partial_search_results` in PIT {es-pull}111516[#111516] -// * Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin` {es-pull}112933[#112933] -// * Avoid using concurrent collector manager in `LuceneChangesSnapshot` {es-pull}113816[#113816] -// * Bool query early termination should also consider `must_not` clauses {es-pull}115031[#115031] -// * Deduplicate Kuromoji User Dictionary {es-pull}112768[#112768] -// * Multi term intervals: increase max_expansions {es-pull}112826[#112826] (issue: {es-issue}110491[#110491]) -// * Search coordinator uses `event.ingested` in cluster state to do rewrites {es-pull}111523[#111523] -// * Update cluster stats for retrievers {es-pull}114109[#114109] -// -// Security:: -// * (logger) change from error to warn for short circuiting user {es-pull}112895[#112895] -// * Add asset criticality indices for `kibana_system_user` {es-pull}113588[#113588] -// * Add tier preference to security index settings allowlist {es-pull}111818[#111818] -// * [Service Account] Add `AutoOps` account {es-pull}111316[#111316] -// -// Snapshot/Restore:: -// * Add `max_multipart_parts` setting to S3 repository {es-pull}113989[#113989] -// * Add support for Azure Managed Identity {es-pull}111344[#111344] -// * Add telemetry for repository usage {es-pull}112133[#112133] -// * Add workaround for missing shard gen blob {es-pull}112337[#112337] -// * Clean up dangling S3 multipart uploads {es-pull}111955[#111955] (issues: {es-issue}101169[#101169], {es-issue}44971[#44971]) -// * Execute shard snapshot tasks in shard-id order {es-pull}111576[#111576] (issue: {es-issue}108739[#108739]) -// * Include account name in Azure settings exceptions {es-pull}111274[#111274] -// * Introduce repository integrity verification API {es-pull}112348[#112348] (issue: {es-issue}52622[#52622]) -// * Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] -// * Track shard snapshot progress during node shutdown {es-pull}112567[#112567] -// -// Stats:: -// * Track search and fetch failure stats {es-pull}113988[#113988] -// -// TSDB:: -// * Add support for boolean dimensions {es-pull}111457[#111457] (issue: {es-issue}111338[#111338]) -// * Stop iterating over all fields to extract @timestamp value {es-pull}110603[#110603] (issue: {es-issue}92297[#92297]) -// * Support booleans in routing path {es-pull}111445[#111445] -// -// Vector Search:: -// * Dense vector field types updatable for int4 {es-pull}110928[#110928] -// * Use native scalar scorer for int8_flat index {es-pull}111071[#111071] -// -// [[feature-9.0.0]] -// [float] -// === New features -// -// Data streams:: -// * Introduce global retention in data stream lifecycle. 
{es-pull}111972[#111972] -// * X-pack/plugin/otel: introduce x-pack-otel plugin {es-pull}111091[#111091] -// -// ES|QL:: -// * Add ESQL match function {es-pull}113374[#113374] -// * ESQL: Add `MV_PSERIES_WEIGHTED_SUM` for score calculations used by security solution {es-pull}109017[#109017] -// * ESQL: Add async ID and `is_running` headers to ESQL async query {es-pull}111840[#111840] -// * ESQL: Add boolean support to Max and Min aggs {es-pull}110527[#110527] -// * ESQL: Add boolean support to TOP aggregation {es-pull}110718[#110718] -// * ESQL: Added `mv_percentile` function {es-pull}111749[#111749] (issue: {es-issue}111591[#111591]) -// * ESQL: INLINESTATS {es-pull}109583[#109583] (issue: {es-issue}107589[#107589]) -// * ESQL: Introduce per agg filter {es-pull}113735[#113735] -// * ESQL: Strings support for MAX and MIN aggregations {es-pull}111544[#111544] -// * ESQL: Support IP fields in MAX and MIN aggregations {es-pull}110921[#110921] -// * ESQL: TOP aggregation IP support {es-pull}111105[#111105] -// * ESQL: TOP support for strings {es-pull}113183[#113183] (issue: {es-issue}109849[#109849]) -// * ESQL: `mv_median_absolute_deviation` function {es-pull}112055[#112055] (issue: {es-issue}111590[#111590]) -// * Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] -// * Search in ES|QL: Add MATCH operator {es-pull}110971[#110971] -// -// ILM+SLM:: -// * SLM Interval based scheduling {es-pull}110847[#110847] -// -// Inference:: -// * EIS integration {es-pull}111154[#111154] -// -// Ingest Node:: -// * Add a `terminate` ingest processor {es-pull}114157[#114157] (issue: {es-issue}110218[#110218]) -// -// Machine Learning:: -// * Inference autoscaling {es-pull}109667[#109667] -// * Telemetry for inference adaptive allocations {es-pull}110630[#110630] -// -// Relevance:: -// * [Query rules] Add `exclude` query rule type {es-pull}111420[#111420] -// -// Search:: -// * Async search: Add ID and "is running" http headers {es-pull}112431[#112431] (issue: {es-issue}109576[#109576]) -// * Cross-cluster search telemetry {es-pull}113825[#113825] -// -// Vector Search:: -// * Adding new bbq index types behind a feature flag {es-pull}114439[#114439] + +Aggregations:: +* Handle with `illegalArgumentExceptions` negative values in HDR percentile aggregations {es-pull}116174[#116174] (issue: {es-issue}115777[#115777]) + +Analysis:: +* Adjust analyze limit exception to be a `bad_request` {es-pull}116325[#116325] + +CCS:: +* Fix long metric deserialize & add - auto-resize needs to be set manually {es-pull}117105[#117105] (issue: {es-issue}116914[#116914]) + +CRUD:: +* Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] +* Standardize error code when bulk body is invalid {es-pull}114869[#114869] + +Data streams:: +* Add missing header in `put_data_lifecycle` rest-api-spec {es-pull}116292[#116292] + +EQL:: +* Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) + +ES|QL:: +* Added stricter range type checks and runtime warnings for ENRICH {es-pull}115091[#115091] (issues: {es-issue}107357[#107357], {es-issue}116799[#116799]) +* Don't return TEXT type for functions that take TEXT {es-pull}114334[#114334] (issues: {es-issue}111537[#111537], {es-issue}114333[#114333]) +* ESQL: Fix sorts containing `_source` {es-pull}116980[#116980] (issue: {es-issue}116659[#116659]) +* ESQL: fix the column position in errors {es-pull}117153[#117153] +* ES|QL: Fix stats by constant 
expression {es-pull}114899[#114899] +* Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions {es-pull}116583[#116583] (issues: {es-issue}116529[#116529], {es-issue}116544[#116544]) +* Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) +* Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` {es-pull}114665[#114665] (issue: {es-issue}105095[#105095]) +* Use `SearchStats` instead of field.isAggregatable in data node planning {es-pull}115744[#115744] (issue: {es-issue}115737[#115737]) +* [ESQL] Fix Binary Comparisons on Date Nanos {es-pull}116346[#116346] +* [ES|QL] To_DatePeriod and To_TimeDuration return better error messages on `union_type` fields {es-pull}114934[#114934] + +Infra/CLI:: +* Fix NPE on plugin sync {es-pull}115640[#115640] (issue: {es-issue}114818[#114818]) + +Infra/Metrics:: +* Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] + +Infra/REST API:: +* Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. {es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) + +Infra/Settings:: +* Don't allow secure settings in YML config (109115) {es-pull}115779[#115779] (issue: {es-issue}109115[#109115]) + +Ingest Node:: +* Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) +* Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] + +Logs:: +* Always check if index mode is logsdb {es-pull}116922[#116922] +* Prohibit changes to index mode, source, and sort settings during resize {es-pull}115812[#115812] + +Machine Learning:: +* Fix bug in ML autoscaling when some node info is unavailable {es-pull}116650[#116650] +* Fix deberta tokenizer bug caused by bug in normalizer {es-pull}117189[#117189] +* Hides `hugging_face_elser` service from the `GET _inference/_services API` {es-pull}116664[#116664] (issue: {es-issue}116644[#116644]) +* Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) +* Propagate scoring function through random sampler {es-pull}116957[#116957] (issue: {es-issue}110134[#110134]) +* Update Deberta tokenizer {es-pull}116358[#116358] +* Wait for up to 2 seconds for yellow status before starting search {es-pull}115938[#115938] (issues: {es-issue}107777[#107777], {es-issue}105955[#105955], {es-issue}107815[#107815], {es-issue}112191[#112191]) + +Mapping:: +* Change synthetic source logic for `constant_keyword` {es-pull}117182[#117182] (issue: {es-issue}117083[#117083]) +* Ignore conflicting fields during dynamic mapping update {es-pull}114227[#114227] (issue: {es-issue}114228[#114228]) + +Network:: +* Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` {es-pull}116211[#116211] + +Ranking:: +* Propagating nested `inner_hits` to the parent compound retriever {es-pull}116408[#116408] (issue: {es-issue}116397[#116397]) + +Relevance:: +* Fix handling of bulk requests with semantic text fields and delete ops {es-pull}116942[#116942] + +Search:: +* Catch and handle disconnect exceptions in search {es-pull}115836[#115836] +* Fields caps does not honour ignore_unavailable {es-pull}116021[#116021] (issue: {es-issue}107767[#107767]) +* Fix handling of time exceeded exception in fetch phase {es-pull}116676[#116676] +* Fix leak in `DfsQueryPhase` and introduce search 
disconnect stress test {es-pull}116060[#116060] (issue: {es-issue}115056[#115056]) +* Inconsistency in the _analyzer api when the index is not included {es-pull}115930[#115930] +* Semantic text simple partial update {es-pull}116478[#116478] +* Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) +* Validate missing shards after the coordinator rewrite {es-pull}116382[#116382] +* _validate does not honour ignore_unavailable {es-pull}116656[#116656] (issue: {es-issue}116594[#116594]) + +Snapshot/Restore:: +* Retry throttled snapshot deletions {es-pull}113237[#113237] + +Vector Search:: +* Update Semantic Query To Handle Zero Size Responses {es-pull}116277[#116277] (issue: {es-issue}116083[#116083]) + +Watcher:: +* Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) + +[[deprecation-9.0.0]] +[float] +=== Deprecations + +Ingest Node:: +* Fix `_type` deprecation on simulate pipeline API {es-pull}116259[#116259] + +Machine Learning:: +* [Inference API] Deprecate elser service {es-pull}113216[#113216] + +Mapping:: +* Deprecate `_source.mode` in mappings {es-pull}116689[#116689] + +[[enhancement-9.0.0]] +[float] +=== Enhancements + +Allocation:: +* Only publish desired balance gauges on master {es-pull}115383[#115383] + +Authorization:: +* Add a `monitor_stats` privilege and allow that privilege for remote cluster privileges {es-pull}114964[#114964] +* [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] + +CRUD:: +* Suppress merge-on-recovery for older indices {es-pull}113462[#113462] + +Data streams:: +* Adding a deprecation info API warning for data streams with old indices {es-pull}116447[#116447] +* Apm-data: disable date_detection for all apm data streams {es-pull}116995[#116995] + +Distributed:: +* Metrics for incremental bulk splits {es-pull}116765[#116765] +* Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] + +ES|QL:: +* Add ES|QL `bit_length` function {es-pull}115792[#115792] +* ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time {es-pull}116348[#116348] (issue: {es-issue}114531[#114531]) +* ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] +* ESQL: extract common filter from aggs {es-pull}115678[#115678] +* ESQL: optimise aggregations filtered by false/null into evals {es-pull}115858[#115858] +* ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote clusters {es-pull}115266[#115266] (issue: {es-issue}114531[#114531]) +* ES|QL: add metrics for functions {es-pull}114620[#114620] +* Esql Enable Date Nanos (tech preview) {es-pull}117080[#117080] +* Support partial sort fields in TopN pushdown {es-pull}116043[#116043] (issue: {es-issue}114515[#114515]) +* [ES|QL] Implicit casting string literal to intervals {es-pull}115814[#115814] (issue: {es-issue}115352[#115352]) + +Health:: +* Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] + +Indices APIs:: +* Ensure class resource stream is closed in `ResourceUtils` {es-pull}116437[#116437] + +Inference:: +* Add version prefix to Inference Service API path {es-pull}117095[#117095] + +Infra/Circuit Breakers:: +* Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] + +Infra/Core:: +* Support for unsigned 64 bit numbers in Cpu stats {es-pull}114681[#114681] 
(issue: {es-issue}112274[#112274]) + +Infra/Metrics:: +* Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] + +Infra/Scripting:: +* Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] + +Ingest Node:: +* Add postal_code support to the City and Enterprise databases {es-pull}114193[#114193] +* Add support for registered country fields for maxmind geoip databases {es-pull}114521[#114521] +* Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] +* Adding support for simulate ingest mapping addition for indices with mappings that do not come from templates {es-pull}115359[#115359] +* Support IPinfo database configurations {es-pull}114548[#114548] +* Support more maxmind fields in the geoip processor {es-pull}114268[#114268] + +Logs:: +* Add logsdb telemetry {es-pull}115994[#115994] +* Add num docs and size to logsdb telemetry {es-pull}116128[#116128] +* Feature: re-structure document ID generation favoring _id inverted index compression {es-pull}104683[#104683] + +Machine Learning:: +* Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] +* Add special case for elastic reranker in inference API {es-pull}116962[#116962] +* Adding inference endpoint validation for `AzureAiStudioService` {es-pull}113713[#113713] +* Adds support for `input_type` field to Vertex inference service {es-pull}116431[#116431] +* Enable built-in Inference Endpoints and default for Semantic Text {es-pull}116931[#116931] +* Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] +* Inference duration and error metrics {es-pull}115876[#115876] +* Remove all mentions of eis and gateway and deprecate flags that do {es-pull}116692[#116692] +* [Inference API] Add API to get configuration of inference services {es-pull}114862[#114862] +* [Inference API] Improve chunked results error message {es-pull}115807[#115807] + +Network:: +* Allow http unsafe buffers by default {es-pull}116115[#116115] + +Recovery:: +* Attempt to clean up index before remote transfer {es-pull}115142[#115142] (issue: {es-issue}104473[#104473]) +* Trigger merges after recovery {es-pull}113102[#113102] + +Reindex:: +* Change Reindexing metrics unit from millis to seconds {es-pull}115721[#115721] + +Relevance:: +* Add query rules retriever {es-pull}114855[#114855] +* Add tracking for query rule types {es-pull}116357[#116357] + +Search:: +* Add Search Phase APM metrics {es-pull}113194[#113194] +* Add `docvalue_fields` Support for `dense_vector` Fields {es-pull}114484[#114484] (issue: {es-issue}108470[#108470]) +* Add initial support for `semantic_text` field type {es-pull}113920[#113920] +* Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter {es-pull}115459[#115459] (issue: {es-issue}97849[#97849]) +* Better sizing `BytesRef` for Strings in Queries {es-pull}115655[#115655] +* Enable `_tier` based coordinator rewrites for all indices (not just mounted indices) {es-pull}115797[#115797] +* Only aggregations require at least one shard request {es-pull}115314[#115314] + +Security:: +* Add refresh `.security` index call between security migrations {es-pull}114879[#114879] + +Snapshot/Restore:: +* Improve message about insecure S3 settings {es-pull}116915[#116915] +* Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] +* Split searchable snapshot into multiple repo operations {es-pull}116918[#116918] 
+* Track shard snapshot progress during node shutdown {es-pull}112567[#112567] + +Vector Search:: +* Add support for bitwise inner-product in painless {es-pull}116082[#116082] + +[[feature-9.0.0]] +[float] +=== New features + +Data streams:: +* Add default ILM policies and switch to ILM for apm-data plugin {es-pull}115687[#115687] + +ES|QL:: +* Add support for `BYTE_LENGTH` scalar function {es-pull}116591[#116591] +* Esql/lookup join grammar {es-pull}116515[#116515] +* Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] + +Search:: +* ESQL - Add match operator (:) {es-pull}116819[#116819] [[upgrade-9.0.0]] [float] === Upgrades -// -// Infra/Core:: -// * Upgrade xcontent to Jackson 2.17.0 {es-pull}111948[#111948] -// * Upgrade xcontent to Jackson 2.17.2 {es-pull}112320[#112320] -// -// Infra/Metrics:: -// * Update APM Java Agent to support JDK 23 {es-pull}115194[#115194] (issues: {es-issue}115101[#115101], {es-issue}115100[#115100]) -// -// Search:: -// * Upgrade to Lucene 10 {es-pull}114741[#114741] -// * Upgrade to Lucene 9.12 {es-pull}113333[#113333] -// -// Snapshot/Restore:: -// * Upgrade Azure SDK {es-pull}111225[#111225] -// * Upgrade `repository-azure` dependencies {es-pull}112277[#112277] + +Search:: +* Upgrade to Lucene 10 {es-pull}114741[#114741] diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index edecd4f727583..b87081639c684 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-highlights]] == What's new in {minor-version} @@ -12,163 +9,14 @@ For detailed information about this release, see the <> and <>. endif::[] -// -// // tag::notable-highlights[] -// -// [discrete] -// [[esql_inlinestats]] -// === ESQL: INLINESTATS -// This adds the `INLINESTATS` command to ESQL which performs a STATS and -// then enriches the results into the output stream. So, this query: -// -// [source,esql] -// ---- -// FROM test -// | INLINESTATS m=MAX(a * b) BY b -// | WHERE m == a * b -// | SORT a DESC, b DESC -// | LIMIT 3 -// ---- -// -// Produces output like: -// -// | a | b | m | -// | --- | --- | ----- | -// | 99 | 999 | 98901 | -// | 99 | 998 | 98802 | -// | 99 | 997 | 98703 | -// -// {es-pull}109583[#109583] -// -// [discrete] -// [[always_allow_rebalancing_by_default]] -// === Always allow rebalancing by default -// In earlier versions of {es} the `cluster.routing.allocation.allow_rebalance` setting defaults to -// `indices_all_active` which blocks all rebalancing moves while the cluster is in `yellow` or `red` health. This was -// appropriate for the legacy allocator which might do too many rebalancing moves otherwise. Today's allocator has -// better support for rebalancing a cluster that is not in `green` health, and expects to be able to rebalance some -// shards away from over-full nodes to avoid allocating shards to undesirable locations in the first place. From -// version 8.16 `allow_rebalance` setting defaults to `always` unless the legacy allocator is explicitly enabled. 
-// -// {es-pull}111015[#111015] -// -// [discrete] -// [[add_global_retention_in_data_stream_lifecycle]] -// === Add global retention in data stream lifecycle -// Data stream lifecycle now supports configuring retention on a cluster level, -// namely global retention. Global retention \nallows us to configure two different -// retentions: -// -// - `data_streams.lifecycle.retention.default` is applied to all data streams managed -// by the data stream lifecycle that do not have retention defined on the data stream level. -// - `data_streams.lifecycle.retention.max` is applied to all data streams managed by the -// data stream lifecycle and it allows any data stream \ndata to be deleted after the `max_retention` has passed. -// -// {es-pull}111972[#111972] -// -// [discrete] -// [[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]] -// === Enable ZStandard compression for indices with index.codec set to best_compression -// Before DEFLATE compression was used to compress stored fields in indices with index.codec index setting set to -// best_compression, with this change ZStandard is used as compression algorithm to stored fields for indices with -// index.codec index setting set to best_compression. The usage ZStandard results in less storage usage with a -// similar indexing throughput depending on what options are used. Experiments with indexing logs have shown that -// ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput compared to DEFLATE. -// -// {es-pull}112665[#112665] -// -// [discrete] -// [[esql_introduce_per_agg_filter]] -// === ESQL: Introduce per agg filter -// Add support for aggregation scoped filters that work dynamically on the -// data in each group. -// -// [source,esql] -// ---- -// | STATS success = COUNT(*) WHERE 200 <= code AND code < 300, -// redirect = COUNT(*) WHERE 300 <= code AND code < 400, -// client_err = COUNT(*) WHERE 400 <= code AND code < 500, -// server_err = COUNT(*) WHERE 500 <= code AND code < 600, -// total_count = COUNT(*) -// ---- -// -// Implementation wise, the base AggregateFunction has been extended to -// allow a filter to be passed on. This is required to incorporate the -// filter as part of the aggregate equality/identity which would fail with -// the filter as an external component. -// As part of the process, the serialization for the existing aggregations -// had to be fixed so AggregateFunction implementations so that it -// delegates to their parent first. -// -// {es-pull}113735[#113735] -// -// // end::notable-highlights[] -// -// -// [discrete] -// [[esql_multi_value_fields_supported_in_geospatial_predicates]] -// === ESQL: Multi-value fields supported in Geospatial predicates -// Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY` -// of the values in the field should pass the predicate. -// For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not? -// This ambiguity does not exist with the spatial predicates -// `ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL` -// is implied by the predicate itself. -// Consider a predicate checking a field named `location` against a test geometry named `shape`: -// -// * `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape -// * `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape -// -// This works even if the shape argument is itself a complex or compound geometry. 
-// -// Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved -// with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained -// geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value -// field is first combined into a single geometry before performing the predicate check. -// -// * `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape -// * `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape -// -// {es-pull}112063[#112063] -// -// [discrete] -// [[enhance_sort_push_down_to_lucene_to_cover_references_to_fields_st_distance_function]] -// === Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function -// The most used and likely most valuable geospatial search query in Elasticsearch is the sorted proximity search, -// finding items within a certain distance of a point of interest and sorting the results by distance. -// This has been possible in ES|QL since 8.15.0, but the sorting was done in-memory, not pushed down to Lucene. -// Now the sorting is pushed down to Lucene, which results in a significant performance improvement. -// -// Queries that perform both filtering and sorting on distance are supported. For example: -// -// [source,esql] -// ---- -// FROM test -// | EVAL distance = ST_DISTANCE(location, TO_GEOPOINT("POINT(37.7749, -122.4194)")) -// | WHERE distance < 1000000 -// | SORT distance ASC, name DESC -// | LIMIT 10 -// ---- -// -// In addition, the support for sorting on EVAL expressions has been extended to cover references to fields: -// -// [source,esql] -// ---- -// FROM test -// | EVAL ref = field -// | SORT ref ASC -// | LIMIT 10 -// ---- -// -// {es-pull}112938[#112938] -// + +// The notable-highlights tag marks entries that +// should be featured in the Stack Installation and Upgrade Guide: +// tag::notable-highlights[] // [discrete] -// [[cross_cluster_search_telemetry]] -// === Cross-cluster search telemetry -// The cross-cluster search telemetry is collected when cross-cluster searches -// are performed, and is returned as "ccs" field in `_cluster/stats` output. -// It also add a new parameter `include_remotes=true` to the `_cluster/stats` API -// which will collect data from connected remote clusters. +// === Heading // -// {es-pull}113825[#113825] +// Description. +// end::notable-highlights[] + diff --git a/docs/reference/security/fips-140-compliance.asciidoc b/docs/reference/security/fips-140-compliance.asciidoc index 5bf73d43541d6..dec17927e62b8 100644 --- a/docs/reference/security/fips-140-compliance.asciidoc +++ b/docs/reference/security/fips-140-compliance.asciidoc @@ -53,8 +53,8 @@ https://docs.oracle.com/en/java/javase/17/security/java-cryptography-architectur https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html[JSSE] implementation is required so that the JVM uses FIPS validated implementations of NIST recommended cryptographic algorithms. -Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.4/bc-fips-1.0.2.4.jar[bc-fips 1.0.2.4] -and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.17/bctls-fips-1.0.17.jar[bctls-fips 1.0.17]. 
+Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.5/bc-fips-1.0.2.5.jar[bc-fips 1.0.2.5] +and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.19/bctls-fips-1.0.19.jar[bctls-fips 1.0.19]. Please refer to the {es} https://www.elastic.co/support/matrix#matrix_jvm[JVM support matrix] for details on which combinations of JVM and security provider are supported in FIPS mode. Elasticsearch does not ship with a FIPS certified provider. It is the responsibility of the user to install and configure the security provider to ensure compliance with FIPS 140-2. Using a FIPS certified provider will ensure that only diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3f56071f6f495..2f465e06a662a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -3288,14 +3288,14 @@ - - - + + + - - - + + + @@ -3333,9 +3333,9 @@ - - - + + + diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java index f5fe8d41c2243..a3bbb611f3e68 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImpl.java @@ -9,19 +9,29 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.objectweb.asm.ClassReader; +import org.objectweb.asm.ClassVisitor; +import org.objectweb.asm.MethodVisitor; +import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; +import java.io.IOException; import java.lang.reflect.Method; -import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.stream.Stream; public class InstrumentationServiceImpl implements InstrumentationService { + @Override - public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { + public Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods) { return new InstrumenterImpl(classNameSuffix, instrumentationMethods); } @@ -33,9 +43,97 @@ public MethodKey methodKeyForTarget(Method targetMethod) { return new MethodKey( Type.getInternalName(targetMethod.getDeclaringClass()), targetMethod.getName(), - Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList(), - Modifier.isStatic(targetMethod.getModifiers()) + Stream.of(actualType.getArgumentTypes()).map(Type::getInternalName).toList() ); } + @Override + public Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, + IOException { + var methodsToInstrument = new HashMap(); + var checkerClass = Class.forName(entitlementCheckerClassName); + var classFileInfo = InstrumenterImpl.getClassFileInfo(checkerClass); + ClassReader reader = new ClassReader(classFileInfo.bytecodes()); + ClassVisitor visitor = new ClassVisitor(Opcodes.ASM9) { + @Override + public MethodVisitor 
visitMethod( + int access, + String checkerMethodName, + String checkerMethodDescriptor, + String signature, + String[] exceptions + ) { + var mv = super.visitMethod(access, checkerMethodName, checkerMethodDescriptor, signature, exceptions); + + var checkerMethodArgumentTypes = Type.getArgumentTypes(checkerMethodDescriptor); + var methodToInstrument = parseCheckerMethodSignature(checkerMethodName, checkerMethodArgumentTypes); + + var checkerParameterDescriptors = Arrays.stream(checkerMethodArgumentTypes).map(Type::getDescriptor).toList(); + var checkerMethod = new CheckerMethod(Type.getInternalName(checkerClass), checkerMethodName, checkerParameterDescriptors); + + methodsToInstrument.put(methodToInstrument, checkerMethod); + + return mv; + } + }; + reader.accept(visitor, 0); + return methodsToInstrument; + } + + private static final Type CLASS_TYPE = Type.getType(Class.class); + + static MethodKey parseCheckerMethodSignature(String checkerMethodName, Type[] checkerMethodArgumentTypes) { + var classNameStartIndex = checkerMethodName.indexOf('$'); + var classNameEndIndex = checkerMethodName.lastIndexOf('$'); + + if (classNameStartIndex == -1 || classNameStartIndex >= classNameEndIndex) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect name format. " + + "It should be either check$$methodName (instance) or check$package_ClassName$methodName (static)", + checkerMethodName + ) + ); + } + + // No "className" (check$$methodName) -> method is static, and we'll get the class from the actual typed argument + final boolean targetMethodIsStatic = classNameStartIndex + 1 != classNameEndIndex; + final String targetMethodName = checkerMethodName.substring(classNameEndIndex + 1); + + final String targetClassName; + final List targetParameterTypes; + if (targetMethodIsStatic) { + if (checkerMethodArgumentTypes.length < 1 || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. " + "It must have a first argument of Class type.", + checkerMethodName + ) + ); + } + + targetClassName = checkerMethodName.substring(classNameStartIndex + 1, classNameEndIndex).replace('_', '/'); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(1).map(Type::getInternalName).toList(); + } else { + if (checkerMethodArgumentTypes.length < 2 + || CLASS_TYPE.equals(checkerMethodArgumentTypes[0]) == false + || checkerMethodArgumentTypes[1].getSort() != Type.OBJECT) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Checker method %s has incorrect argument types. 
" + + "It must have a first argument of Class type, and a second argument of the class containing the method to " + + "instrument", + checkerMethodName + ) + ); + } + var targetClassType = checkerMethodArgumentTypes[1]; + targetClassName = targetClassType.getInternalName(); + targetParameterTypes = Arrays.stream(checkerMethodArgumentTypes).skip(2).map(Type::getInternalName).toList(); + } + return new MethodKey(targetClassName, targetMethodName, targetParameterTypes); + } } diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 63c9ccd80be70..53e76372b107d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation.impl; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.Instrumenter; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.objectweb.asm.AnnotationVisitor; @@ -23,7 +24,6 @@ import java.io.IOException; import java.io.InputStream; -import java.lang.reflect.Method; import java.util.Map; import java.util.stream.Stream; @@ -40,9 +40,9 @@ public class InstrumenterImpl implements Instrumenter { * To avoid class name collisions during testing without an agent to replace classes in-place. */ private final String classNameSuffix; - private final Map instrumentationMethods; + private final Map instrumentationMethods; - public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { + public InstrumenterImpl(String classNameSuffix, Map instrumentationMethods) { this.classNameSuffix = classNameSuffix; this.instrumentationMethods = instrumentationMethods; } @@ -138,12 +138,7 @@ public MethodVisitor visitMethod(int access, String name, String descriptor, Str var mv = super.visitMethod(access, name, descriptor, signature, exceptions); if (isAnnotationPresent == false) { boolean isStatic = (access & ACC_STATIC) != 0; - var key = new MethodKey( - className, - name, - Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList(), - isStatic - ); + var key = new MethodKey(className, name, Stream.of(Type.getArgumentTypes(descriptor)).map(Type::getInternalName).toList()); var instrumentationMethod = instrumentationMethods.get(key); if (instrumentationMethod != null) { // LOGGER.debug("Will instrument method {}", key); @@ -177,7 +172,7 @@ private void addClassAnnotationIfNeeded() { class EntitlementMethodVisitor extends MethodVisitor { private final boolean instrumentedMethodIsStatic; private final String instrumentedMethodDescriptor; - private final Method instrumentationMethod; + private final CheckerMethod instrumentationMethod; private boolean hasCallerSensitiveAnnotation = false; EntitlementMethodVisitor( @@ -185,7 +180,7 @@ class EntitlementMethodVisitor extends MethodVisitor { MethodVisitor methodVisitor, boolean instrumentedMethodIsStatic, String instrumentedMethodDescriptor, - Method instrumentationMethod + CheckerMethod instrumentationMethod ) { super(api, methodVisitor); this.instrumentedMethodIsStatic = instrumentedMethodIsStatic; @@ -262,9 +257,12 @@ private void forwardIncomingArguments() { private void 
invokeInstrumentationMethod() { mv.visitMethodInsn( INVOKEINTERFACE, - Type.getInternalName(instrumentationMethod.getDeclaringClass()), - instrumentationMethod.getName(), - Type.getMethodDescriptor(instrumentationMethod), + instrumentationMethod.className(), + instrumentationMethod.methodName(), + Type.getMethodDescriptor( + Type.VOID_TYPE, + instrumentationMethod.parameterDescriptors().stream().map(Type::getType).toArray(Type[]::new) + ), true ); } diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java new file mode 100644 index 0000000000000..c0ff5d59d3c72 --- /dev/null +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests.java @@ -0,0 +1,262 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation.impl; + +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; +import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; +import org.elasticsearch.test.ESTestCase; +import org.objectweb.asm.Type; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.aMapWithSize; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; + +@ESTestCase.WithoutSecurityManager +public class InstrumentationServiceImplTests extends ESTestCase { + + final InstrumentationService instrumentationService = new InstrumentationServiceImpl(); + + static class TestTargetClass {} + + interface TestChecker { + void check$org_example_TestTargetClass$staticMethod(Class clazz, int arg0, String arg1, Object arg2); + + void check$$instanceMethodNoArgs(Class clazz, TestTargetClass that); + + void check$$instanceMethodWithArgs(Class clazz, TestTargetClass that, int x, int y); + } + + interface TestCheckerOverloads { + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, int y); + + void check$org_example_TestTargetClass$staticMethodWithOverload(Class clazz, int x, String y); + } + + public void testInstrumentationTargetLookup() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestChecker.class.getName()); + + assertThat(methodsMap, aMapWithSize(3)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethod", List.of("I", "java/lang/String", "java/lang/Object"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$org_example_TestTargetClass$staticMethod", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;", "Ljava/lang/Object;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( 
+ equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodNoArgs", + List.of() + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodNoArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;" + ) + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethodWithArgs", + List.of("I", "I") + ) + ), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestChecker", + "check$$instanceMethodWithArgs", + List.of( + "Ljava/lang/Class;", + "Lorg/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass;", + "I", + "I" + ) + ) + ) + ) + ); + } + + public void testInstrumentationTargetLookupWithOverloads() throws IOException, ClassNotFoundException { + Map methodsMap = instrumentationService.lookupMethodsToInstrument(TestCheckerOverloads.class.getName()); + + assertThat(methodsMap, aMapWithSize(2)); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "java/lang/String"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "Ljava/lang/String;") + ) + ) + ) + ); + assertThat( + methodsMap, + hasEntry( + equalTo(new MethodKey("org/example/TestTargetClass", "staticMethodWithOverload", List.of("I", "I"))), + equalTo( + new CheckerMethod( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestCheckerOverloads", + "check$org_example_TestTargetClass$staticMethodWithOverload", + List.of("Ljava/lang/Class;", "I", "I") + ) + ) + ) + ); + } + + public void testParseCheckerMethodSignatureStaticMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureStaticMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$staticMethod", + new Type[] { Type.getType(Class.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass", "staticMethod", List.of("I", "java/lang/String")))); + } + + public void testParseCheckerMethodSignatureStaticMethodInnerClass() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$org_example_TestClass$InnerClass$staticMethod", + new Type[] { Type.getType(Class.class) } + ); + + assertThat(methodKey, equalTo(new MethodKey("org/example/TestClass$InnerClass", "staticMethod", List.of()))); + } + + public void testParseCheckerMethodSignatureIncorrectName() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$staticMethod", new Type[] { 
Type.getType(Class.class) }) + ); + + assertThat(exception.getMessage(), containsString("has incorrect name format")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$ClassName$staticMethod", new Type[] {}) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureStaticMethodIncorrectArgumentType() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$ClassName$staticMethod", + new Type[] { Type.getType(String.class) } + ) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethod() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of() + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodWithArgs() { + var methodKey = InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType(TestTargetClass.class), Type.getType("I"), Type.getType(String.class) } + ); + + assertThat( + methodKey, + equalTo( + new MethodKey( + "org/elasticsearch/entitlement/instrumentation/impl/InstrumentationServiceImplTests$TestTargetClass", + "instanceMethod", + List.of("I", "java/lang/String") + ) + ) + ); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(String.class) }) + ); + assertThat(exception.getMessage(), containsString("It must have a first argument of Class type")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentCount() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature("check$$instanceMethod", new Type[] { Type.getType(Class.class) }) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } + + public void testParseCheckerMethodSignatureInstanceMethodIncorrectArgumentTypes2() { + var exception = assertThrows( + IllegalArgumentException.class, + () -> InstrumentationServiceImpl.parseCheckerMethodSignature( + "check$$instanceMethod", + new Type[] { Type.getType(Class.class), Type.getType("I") } + ) + ); + assertThat(exception.getMessage(), containsString("a second argument of the class containing the method to instrument")); + } +} diff --git a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java index 9a57e199d4907..e3f5539999be5 100644 --- 
a/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java +++ b/libs/entitlement/asm-provider/src/test/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterTests.java @@ -11,7 +11,9 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; +import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.ESTestCase; @@ -22,11 +24,12 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Arrays; -import java.util.stream.Collectors; +import java.util.Map; import static org.elasticsearch.entitlement.instrumentation.impl.ASMUtils.bytecode2text; import static org.elasticsearch.entitlement.instrumentation.impl.InstrumenterImpl.getClassFileInfo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; import static org.objectweb.asm.Opcodes.INVOKESTATIC; /** @@ -53,7 +56,12 @@ public void initialize() { * Contains all the virtual methods from {@link ClassToInstrument}, * allowing this test to call them on the dynamically loaded instrumented class. */ - public interface Testable {} + public interface Testable { + // This method is here to demonstrate Instrumenter does not get confused by overloads + void someMethod(int arg); + + void someMethod(int arg, String anotherArg); + } /** * This is a placeholder for real class library methods. @@ -71,10 +79,26 @@ public static void systemExit(int status) { public static void anotherSystemExit(int status) { assertEquals(123, status); } + + public void someMethod(int arg) {} + + public void someMethod(int arg, String anotherArg) {} + + public static void someStaticMethod(int arg) {} + + public static void someStaticMethod(int arg, String anotherArg) {} } static final class TestException extends RuntimeException {} + public interface MockEntitlementChecker extends EntitlementChecker { + void checkSomeStaticMethod(Class clazz, int arg); + + void checkSomeStaticMethod(Class clazz, int arg, String anotherArg); + + void checkSomeInstanceMethod(Class clazz, Testable that, int arg, String anotherArg); + } + /** * We're not testing the permission checking logic here; * only that the instrumented methods are calling the correct check methods with the correct arguments. @@ -82,7 +106,7 @@ static final class TestException extends RuntimeException {} * just to demonstrate that the injected bytecodes succeed in calling these methods. * It also asserts that the arguments are correct. */ - public static class TestEntitlementChecker implements EntitlementChecker { + public static class TestEntitlementChecker implements MockEntitlementChecker { /** * This allows us to test that the instrumentation is correct in both cases: * if the check throws, and if it doesn't. 
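For orientation, the check$ naming convention exercised by these fixtures and by the parseCheckerMethodSignature tests above can be summarized with a minimal sketch. ExampleTarget and ExampleChecker below are hypothetical (assumed to live in package org.example); only the name-to-MethodKey mapping they illustrate is taken from the tests in this change.

class ExampleTarget {                      // hypothetical target used only for illustration
    static void doStatic(int arg) {}
    void doInstance(String arg) {}
}

interface ExampleChecker {
    // "check$" + target class with '.' replaced by '_' + "$" + method name marks a static target;
    // per the tests above this would parse to MethodKey("org/example/ExampleTarget", "doStatic", List.of("I"))
    void check$org_example_ExampleTarget$doStatic(Class<?> callerClass, int arg);

    // an empty class segment ("check$$") marks an instance target; the receiver type is taken from the
    // second parameter, so this parses to MethodKey("org/example/ExampleTarget", "doInstance", List.of("java/lang/String"))
    void check$$doInstance(Class<?> callerClass, ExampleTarget that, String arg);
}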
@@ -90,9 +114,12 @@ public static class TestEntitlementChecker implements EntitlementChecker { volatile boolean isActive; int checkSystemExitCallCount = 0; + int checkSomeStaticMethodIntCallCount = 0; + int checkSomeStaticMethodIntStringCallCount = 0; + int checkSomeInstanceMethodCallCount = 0; @Override - public void checkSystemExit(Class callerClass, int status) { + public void check$java_lang_System$exit(Class callerClass, int status) { checkSystemExitCallCount++; assertSame(InstrumenterTests.class, callerClass); assertEquals(123, status); @@ -104,11 +131,48 @@ private void throwIfActive() { throw new TestException(); } } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg) { + checkSomeStaticMethodIntCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + throwIfActive(); + } + + @Override + public void checkSomeStaticMethod(Class callerClass, int arg, String anotherArg) { + checkSomeStaticMethodIntStringCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertEquals(123, arg); + assertEquals("abc", anotherArg); + throwIfActive(); + } + + @Override + public void checkSomeInstanceMethod(Class callerClass, Testable that, int arg, String anotherArg) { + checkSomeInstanceMethodCallCount++; + assertSame(InstrumenterTests.class, callerClass); + assertThat( + that.getClass().getName(), + startsWith("org.elasticsearch.entitlement.instrumentation.impl.InstrumenterTests$ClassToInstrument") + ); + assertEquals(123, arg); + assertEquals("def", anotherArg); + throwIfActive(); + } } public void testClassIsInstrumented() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); @@ -117,7 +181,7 @@ public void testClassIsInstrumented() throws Exception { } Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW", + classToInstrument.getName() + "_NEW", newBytecode ); @@ -134,7 +198,14 @@ public void testClassIsInstrumented() throws Exception { public void testClassIsNotInstrumentedTwice() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -146,7 +217,7 @@ public void testClassIsNotInstrumentedTwice() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + 
"_NEW_NEW", instrumentedTwiceBytecode ); @@ -159,7 +230,16 @@ public void testClassIsNotInstrumentedTwice() throws Exception { public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { var classToInstrument = ClassToInstrument.class; - var instrumenter = createInstrumenter(classToInstrument, "systemExit", "anotherSystemExit"); + + CheckerMethod checkerMethod = getCheckerMethod(EntitlementChecker.class, "check$java_lang_System$exit", Class.class, int.class); + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("systemExit", int.class)), + checkerMethod, + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("anotherSystemExit", int.class)), + checkerMethod + ); + + var instrumenter = createInstrumenter(methods); InstrumenterImpl.ClassFileInfo initial = getClassFileInfo(classToInstrument); var internalClassName = Type.getInternalName(classToInstrument); @@ -171,7 +251,7 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { logger.trace(() -> Strings.format("Bytecode after 2nd instrumentation:\n%s", bytecode2text(instrumentedTwiceBytecode))); Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( - ClassToInstrument.class.getName() + "_NEW_NEW", + classToInstrument.getName() + "_NEW_NEW", instrumentedTwiceBytecode ); @@ -185,22 +265,78 @@ public void testClassAllMethodsAreInstrumentedFirstPass() throws Exception { assertThat(getTestEntitlementChecker().checkSystemExitCallCount, is(2)); } - /** This test doesn't replace ClassToInstrument in-place but instead loads a separate - * class ClassToInstrument_NEW that contains the instrumentation. Because of this, - * we need to configure the Transformer to use a MethodKey and instrumentationMethod - * with slightly different signatures (using the common interface Testable) which - * is not what would happen when it's run by the agent. - */ - private InstrumenterImpl createInstrumenter(Class classToInstrument, String... 
methodNames) throws NoSuchMethodException { - Method v1 = EntitlementChecker.class.getMethod("checkSystemExit", Class.class, int.class); - var methods = Arrays.stream(methodNames).map(name -> { - try { - return instrumentationService.methodKeyForTarget(classToInstrument.getMethod(name, int.class)); - } catch (NoSuchMethodException e) { - throw new RuntimeException(e); - } - }).collect(Collectors.toUnmodifiableMap(name -> name, name -> v1)); + public void testInstrumenterWorksWithOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class), + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someStaticMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeStaticMethod", Class.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + // After checking is activated, everything should throw + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123)); + assertThrows(TestException.class, () -> callStaticMethod(newClass, "someStaticMethod", 123, "abc")); + + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntCallCount, is(1)); + assertThat(getTestEntitlementChecker().checkSomeStaticMethodIntStringCallCount, is(1)); + } + + public void testInstrumenterWorksWithInstanceMethodsAndOverloads() throws Exception { + var classToInstrument = ClassToInstrument.class; + + Map methods = Map.of( + instrumentationService.methodKeyForTarget(classToInstrument.getMethod("someMethod", int.class, String.class)), + getCheckerMethod(MockEntitlementChecker.class, "checkSomeInstanceMethod", Class.class, Testable.class, int.class, String.class) + ); + + var instrumenter = createInstrumenter(methods); + + byte[] newBytecode = instrumenter.instrumentClassFile(classToInstrument).bytecodes(); + + if (logger.isTraceEnabled()) { + logger.trace("Bytecode after instrumentation:\n{}", bytecode2text(newBytecode)); + } + + Class newClass = new TestLoader(Testable.class.getClassLoader()).defineClassFromBytes( + classToInstrument.getName() + "_NEW", + newBytecode + ); + + getTestEntitlementChecker().isActive = true; + + Testable testTargetClass = (Testable) (newClass.getConstructor().newInstance()); + + // This overload is not instrumented, so it will not throw + testTargetClass.someMethod(123); + assertThrows(TestException.class, () -> testTargetClass.someMethod(123, "def")); + + assertThat(getTestEntitlementChecker().checkSomeInstanceMethodCallCount, is(1)); + } + + /** This test doesn't replace classToInstrument in-place but instead loads a separate + * class with the same class name plus a "_NEW" suffix (classToInstrument.class.getName() + "_NEW") + * that contains the instrumentation. 
Because of this, we need to configure the Transformer to use a + * MethodKey and instrumentationMethod with slightly different signatures (using the common interface + * Testable) which is not what would happen when it's run by the agent. + */ + private InstrumenterImpl createInstrumenter(Map methods) throws NoSuchMethodException { Method getter = InstrumenterTests.class.getMethod("getTestEntitlementChecker"); return new InstrumenterImpl("_NEW", methods) { /** @@ -220,13 +356,38 @@ protected void pushEntitlementChecker(MethodVisitor mv) { }; } + private static CheckerMethod getCheckerMethod(Class clazz, String methodName, Class... parameterTypes) + throws NoSuchMethodException { + var method = clazz.getMethod(methodName, parameterTypes); + return new CheckerMethod( + Type.getInternalName(clazz), + method.getName(), + Arrays.stream(Type.getArgumentTypes(method)).map(Type::getDescriptor).toList() + ); + } + /** * Calling a static method of a dynamically loaded class is significantly more cumbersome * than calling a virtual method. */ - private static void callStaticMethod(Class c, String methodName, int status) throws NoSuchMethodException, IllegalAccessException { + private static void callStaticMethod(Class c, String methodName, int arg) throws NoSuchMethodException, IllegalAccessException { + try { + c.getMethod(methodName, int.class).invoke(null, arg); + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + if (cause instanceof TestException n) { + // Sometimes we're expecting this one! + throw n; + } else { + throw new AssertionError(cause); + } + } + } + + private static void callStaticMethod(Class c, String methodName, int arg1, String arg2) throws NoSuchMethodException, + IllegalAccessException { try { - c.getMethod(methodName, int.class).invoke(null, status); + c.getMethod(methodName, int.class, String.class).invoke(null, arg1, arg2); } catch (InvocationTargetException e) { Throwable cause = e.getCause(); if (cause instanceof TestException n) { diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 5ebb7d00e26f5..167c93c90df5c 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -10,5 +10,5 @@ package org.elasticsearch.entitlement.bridge; public interface EntitlementChecker { - void checkSystemExit(Class callerClass, int status); + void check$java_lang_System$exit(Class callerClass, int status); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 7f68457baea9e..01b8f4d574f90 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -15,6 +15,7 @@ import com.sun.tools.attach.VirtualMachine; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -22,15 +23,33 @@ import java.io.IOException; import java.nio.file.Files; import 
java.nio.file.Path; +import java.util.Collection; +import java.util.Objects; +import java.util.function.Function; public class EntitlementBootstrap { + public record BootstrapArgs(Collection> pluginData, Function, String> pluginResolver) {} + + private static BootstrapArgs bootstrapArgs; + + public static BootstrapArgs bootstrapArgs() { + return bootstrapArgs; + } + /** - * Activates entitlement checking. Once this method returns, calls to forbidden methods - * will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * Activates entitlement checking. Once this method returns, calls to methods protected by Entitlements from classes without a valid + * policy will throw {@link org.elasticsearch.entitlement.runtime.api.NotEntitledException}. + * @param pluginData a collection of (plugin path, boolean), that holds the paths of all the installed Elasticsearch modules and + * plugins, and whether they are Java modular or not. + * @param pluginResolver a functor to map a Java Class to the plugin it belongs to (the plugin name). */ - public static void bootstrap() { + public static void bootstrap(Collection> pluginData, Function, String> pluginResolver) { logger.debug("Loading entitlement agent"); + if (EntitlementBootstrap.bootstrapArgs != null) { + throw new IllegalStateException("plugin data is already set"); + } + EntitlementBootstrap.bootstrapArgs = new BootstrapArgs(Objects.requireNonNull(pluginData), Objects.requireNonNull(pluginResolver)); exportInitializationToAgent(); loadAgent(findAgentJar()); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 155d5a27c606b..6d31abe4cf054 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -9,17 +9,35 @@ package org.elasticsearch.entitlement.initialization; +import org.elasticsearch.core.Tuple; import org.elasticsearch.core.internal.provider.ProviderLocator; +import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.bridge.EntitlementChecker; +import org.elasticsearch.entitlement.instrumentation.CheckerMethod; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.instrumentation.MethodKey; import org.elasticsearch.entitlement.instrumentation.Transformer; import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.policy.Policy; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; +import org.elasticsearch.entitlement.runtime.policy.PolicyParser; +import org.elasticsearch.entitlement.runtime.policy.Scope; +import java.io.IOException; import java.lang.instrument.Instrumentation; -import java.lang.reflect.Method; +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; + +import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; /** * Called by the agent during {@code 
agentmain} to configure the entitlement system, @@ -29,6 +47,9 @@ * to begin injecting our instrumentation. */ public class EntitlementInitialization { + + private static final String POLICY_FILE_NAME = "entitlement-policy.yaml"; + private static ElasticsearchEntitlementChecker manager; // Note: referenced by bridge reflectively @@ -38,16 +59,82 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(); + manager = new ElasticsearchEntitlementChecker(createPolicyManager()); + + Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( + "org.elasticsearch.entitlement.bridge.EntitlementChecker" + ); + + var classesToTransform = methodMap.keySet().stream().map(MethodKey::className).collect(Collectors.toSet()); + + inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), classesToTransform), true); + // TODO: should we limit this array somehow? + var classesToRetransform = classesToTransform.stream().map(EntitlementInitialization::internalNameToClass).toArray(Class[]::new); + inst.retransformClasses(classesToRetransform); + } + + private static Class internalNameToClass(String internalName) { + try { + return Class.forName(internalName.replace('/', '.'), false, ClassLoader.getPlatformClassLoader()); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + private static PolicyManager createPolicyManager() throws IOException { + Map pluginPolicies = createPluginPolicies(EntitlementBootstrap.bootstrapArgs().pluginData()); + + // TODO: What should the name be? + // TODO(ES-10031): Decide what goes in the elasticsearch default policy and extend it + var serverPolicy = new Policy("server", List.of()); + return new PolicyManager(serverPolicy, pluginPolicies, EntitlementBootstrap.bootstrapArgs().pluginResolver()); + } + + private static Map createPluginPolicies(Collection> pluginData) throws IOException { + Map pluginPolicies = new HashMap<>(pluginData.size()); + for (Tuple entry : pluginData) { + Path pluginRoot = entry.v1(); + boolean isModular = entry.v2(); + + String pluginName = pluginRoot.getFileName().toString(); + final Policy policy = loadPluginPolicy(pluginRoot, isModular, pluginName); + + pluginPolicies.put(pluginName, policy); + } + return pluginPolicies; + } + + private static Policy loadPluginPolicy(Path pluginRoot, boolean isModular, String pluginName) throws IOException { + Path policyFile = pluginRoot.resolve(POLICY_FILE_NAME); + + final Set moduleNames = getModuleNames(pluginRoot, isModular); + final Policy policy = parsePolicyIfExists(pluginName, policyFile); + + // TODO: should this check actually be part of the parser? 
+ for (Scope scope : policy.scopes) { + if (moduleNames.contains(scope.name) == false) { + throw new IllegalStateException("policy [" + policyFile + "] contains invalid module [" + scope.name + "]"); + } + } + return policy; + } + + private static Policy parsePolicyIfExists(String pluginName, Path policyFile) throws IOException { + if (Files.exists(policyFile)) { + return new PolicyParser(Files.newInputStream(policyFile, StandardOpenOption.READ), pluginName).parsePolicy(); + } + return new Policy(pluginName, List.of()); + } - // TODO: Configure actual entitlement grants instead of this hardcoded one - Method targetMethod = System.class.getMethod("exit", int.class); - Method instrumentationMethod = Class.forName("org.elasticsearch.entitlement.bridge.EntitlementChecker") - .getMethod("checkSystemExit", Class.class, int.class); - Map methodMap = Map.of(INSTRUMENTER_FACTORY.methodKeyForTarget(targetMethod), instrumentationMethod); + private static Set getModuleNames(Path pluginRoot, boolean isModular) { + if (isModular) { + ModuleFinder moduleFinder = ModuleFinder.of(pluginRoot); + Set moduleReferences = moduleFinder.findAll(); - inst.addTransformer(new Transformer(INSTRUMENTER_FACTORY.newInstrumenter("", methodMap), Set.of(internalName(System.class))), true); - inst.retransformClasses(System.class); + return moduleReferences.stream().map(mr -> mr.descriptor().name()).collect(Collectors.toUnmodifiableSet()); + } + // When isModular == false we use the same "ALL-UNNAMED" constant as the JDK to indicate (any) unnamed module for this plugin + return Set.of(ALL_UNNAMED); } private static String internalName(Class c) { diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java new file mode 100644 index 0000000000000..c20a75a61a608 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/CheckerMethod.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.instrumentation; + +import java.util.List; + +/** + * A structure to use as a representation of the checker method the instrumentation will inject. + * + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the checker method name + * @param parameterDescriptors a list of + * type descriptors) + * for methodName parameters. 
+ */ +public record CheckerMethod(String className, String methodName, List parameterDescriptors) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java index 25fa84ec7c4ba..12316bfb043c5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/InstrumentationService.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.instrumentation; +import java.io.IOException; import java.lang.reflect.Method; import java.util.Map; @@ -16,10 +17,12 @@ * The SPI service entry point for instrumentation. */ public interface InstrumentationService { - Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); + Instrumenter newInstrumenter(String classNameSuffix, Map instrumentationMethods); /** * @return a {@link MethodKey} suitable for looking up the given {@code targetMethod} in the entitlements trampoline */ MethodKey methodKeyForTarget(Method targetMethod); + + Map lookupMethodsToInstrument(String entitlementCheckerClassName) throws ClassNotFoundException, IOException; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java index 54e09c10bcc57..256a4d709d9dc 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/instrumentation/MethodKey.java @@ -12,7 +12,10 @@ import java.util.List; /** + * A structure to use as a key/lookup for a method target of instrumentation * - * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param className the "internal name" of the class: includes the package info, but with periods replaced by slashes + * @param methodName the method name + * @param parameterTypes a list of "internal names" for the parameter types */ -public record MethodKey(String className, String methodName, List parameterTypes, boolean isStatic) {} +public record MethodKey(String className, String methodName, List parameterTypes) {} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 6d5dbd4098aa9..790416ca5659a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,10 +10,8 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.logging.LogManager; -import org.elasticsearch.logging.Logger; - -import java.util.Optional; +import org.elasticsearch.entitlement.runtime.policy.FlagEntitlementType; +import org.elasticsearch.entitlement.runtime.policy.PolicyManager; /** * Implementation of the {@link EntitlementChecker} interface, providing additional @@ -21,51 +19,14 @@ * The trampoline module loads this object via SPI. 
*/ public class ElasticsearchEntitlementChecker implements EntitlementChecker { - private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + private final PolicyManager policyManager; - @Override - public void checkSystemExit(Class callerClass, int status) { - var requestingModule = requestingModule(callerClass); - if (isTriviallyAllowed(requestingModule)) { - return; - } - // Hard-forbidden until we develop the permission granting scheme - throw new NotEntitledException("Missing entitlement for " + requestingModule); + public ElasticsearchEntitlementChecker(PolicyManager policyManager) { + this.policyManager = policyManager; } - private static Module requestingModule(Class callerClass) { - if (callerClass != null) { - Module callerModule = callerClass.getModule(); - if (callerModule.getLayer() != ModuleLayer.boot()) { - // fast path - return callerModule; - } - } - int framesToSkip = 1 // getCallingClass (this method) - + 1 // the checkXxx method - + 1 // the runtime config method - + 1 // the instrumented method - ; - Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) - .walk( - s -> s.skip(framesToSkip) - .map(f -> f.getDeclaringClass().getModule()) - .filter(m -> m.getLayer() != ModuleLayer.boot()) - .findFirst() - ); - return module.orElse(null); - } - - private static boolean isTriviallyAllowed(Module requestingModule) { - if (requestingModule == null) { - logger.debug("Trivially allowed: Entire call stack is in the boot module layer"); - return true; - } - if (requestingModule == System.class.getModule()) { - logger.debug("Trivially allowed: Caller is in {}", System.class.getModule().getName()); - return true; - } - logger.trace("Not trivially allowed"); - return false; + @Override + public void check$java_lang_System$exit(Class callerClass, int status) { + policyManager.checkFlagEntitlement(callerClass, FlagEntitlementType.SYSTEM_EXIT); } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java similarity index 57% rename from build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java rename to libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java index f7ee88c715dfa..60490baf41a10 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/Fixture.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FlagEntitlementType.java @@ -7,15 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -package org.elasticsearch.gradle.internal.test; - -/** - * Any object that can produce an accompanying stop task, meant to tear down - * a previously instantiated service. - */ -public interface Fixture { - - /** A task which will stop this fixture. This should be used as a finalizedBy for any tasks that use the fixture. 
*/ - Object getStopTask(); +package org.elasticsearch.entitlement.runtime.policy; +public enum FlagEntitlementType { + SYSTEM_EXIT; } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java new file mode 100644 index 0000000000000..c06dc09758de5 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import org.elasticsearch.core.Strings; +import org.elasticsearch.entitlement.runtime.api.ElasticsearchEntitlementChecker; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; + +import java.lang.module.ModuleFinder; +import java.lang.module.ModuleReference; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +public class PolicyManager { + private static final Logger logger = LogManager.getLogger(ElasticsearchEntitlementChecker.class); + + protected final Policy serverPolicy; + protected final Map pluginPolicies; + private final Function, String> pluginResolver; + + public static final String ALL_UNNAMED = "ALL-UNNAMED"; + + private static final Set systemModules = findSystemModules(); + + private static Set findSystemModules() { + var systemModulesDescriptors = ModuleFinder.ofSystem() + .findAll() + .stream() + .map(ModuleReference::descriptor) + .collect(Collectors.toUnmodifiableSet()); + + return ModuleLayer.boot() + .modules() + .stream() + .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) + .collect(Collectors.toUnmodifiableSet()); + } + + public PolicyManager(Policy defaultPolicy, Map pluginPolicies, Function, String> pluginResolver) { + this.serverPolicy = Objects.requireNonNull(defaultPolicy); + this.pluginPolicies = Collections.unmodifiableMap(Objects.requireNonNull(pluginPolicies)); + this.pluginResolver = pluginResolver; + } + + public void checkFlagEntitlement(Class callerClass, FlagEntitlementType type) { + var requestingModule = requestingModule(callerClass); + if (isTriviallyAllowed(requestingModule)) { + return; + } + + // TODO: real policy check. For now, we only allow our hardcoded System.exit policy for server. 
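// A minimal illustration of this hardcoded rule (the caller below is a hypothetical stand-in, not part of the policy work):
//     policyManager.checkFlagEntitlement(someServerClass, FlagEntitlementType.SYSTEM_EXIT);
// returns normally when someServerClass belongs to the named module "org.elasticsearch.server",
// while the same call from a class in any other module (e.g. a plugin) falls through to the
// NotEntitledException thrown below.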
+ // TODO: this will be checked using policies + if (requestingModule.isNamed() + && requestingModule.getName().equals("org.elasticsearch.server") + && type == FlagEntitlementType.SYSTEM_EXIT) { + logger.debug("Allowed: caller [{}] in module [{}] has entitlement [{}]", callerClass, requestingModule.getName(), type); + return; + } + + // TODO: plugins policy check using pluginResolver and pluginPolicies + throw new NotEntitledException( + Strings.format("Missing entitlement [%s] for caller [%s] in module [%s]", type, callerClass, requestingModule.getName()) + ); + } + + private static Module requestingModule(Class callerClass) { + if (callerClass != null) { + Module callerModule = callerClass.getModule(); + if (systemModules.contains(callerModule) == false) { + // fast path + return callerModule; + } + } + int framesToSkip = 1 // getCallingClass (this method) + + 1 // the checkXxx method + + 1 // the runtime config method + + 1 // the instrumented method + ; + Optional module = StackWalker.getInstance(StackWalker.Option.RETAIN_CLASS_REFERENCE) + .walk( + s -> s.skip(framesToSkip) + .map(f -> f.getDeclaringClass().getModule()) + .filter(m -> systemModules.contains(m) == false) + .findFirst() + ); + return module.orElse(null); + } + + private static boolean isTriviallyAllowed(Module requestingModule) { + if (requestingModule == null) { + logger.debug("Trivially allowed: entire call stack is composed of classes in system modules"); + return true; + } + logger.trace("Not trivially allowed"); + return false; + } + + @Override + public String toString() { + return "PolicyManager{" + "serverPolicy=" + serverPolicy + ", pluginPolicies=" + pluginPolicies + '}'; + } +} diff --git a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java index 4fd471c529e75..02d0491118dc7 100644 --- a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java @@ -157,7 +157,9 @@ private static void debugThreadGroups(final ThreadGroup caller, final ThreadGrou // Returns true if the given thread is an instance of the JDK's InnocuousThread. private static boolean isInnocuousThread(Thread t) { final Class c = t.getClass(); - return c.getModule() == Object.class.getModule() && c.getName().equals("jdk.internal.misc.InnocuousThread"); + return c.getModule() == Object.class.getModule() + && (c.getName().equals("jdk.internal.misc.InnocuousThread") + || c.getName().equals("java.util.concurrent.ForkJoinWorkerThread$InnocuousForkJoinWorkerThread")); } protected void checkThreadAccess(Thread t) { @@ -184,11 +186,21 @@ protected void checkThreadAccess(Thread t) { private static final Permission MODIFY_THREADGROUP_PERMISSION = new RuntimePermission("modifyThreadGroup"); private static final Permission MODIFY_ARBITRARY_THREADGROUP_PERMISSION = new ThreadPermission("modifyArbitraryThreadGroup"); + // Returns true if the given thread group is the JDK's InnocuousForkJoinWorkerThreadGroup. + private static boolean isInnocuousThreadGroup(ThreadGroup t) { + final Class c = t.getClass(); + return c.getModule() == Object.class.getModule() && t.getName().equals("InnocuousForkJoinWorkerThreadGroup"); + } + protected void checkThreadGroupAccess(ThreadGroup g) { + Objects.requireNonNull(g); + boolean targetThreadGroupIsInnocuous = isInnocuousThreadGroup(g); + // first, check if we can modify thread groups at all.
- checkPermission(MODIFY_THREADGROUP_PERMISSION); + if (targetThreadGroupIsInnocuous == false) { + checkPermission(MODIFY_THREADGROUP_PERMISSION); + } // check the threadgroup, if its our thread group or an ancestor, its fine. final ThreadGroup source = Thread.currentThread().getThreadGroup(); @@ -196,7 +208,7 @@ protected void checkThreadGroupAccess(ThreadGroup g) { if (source == null) { return; // we are a dead thread, do nothing - } else if (source.parentOf(target) == false) { + } else if (source.parentOf(target) == false && targetThreadGroupIsInnocuous == false) { checkPermission(MODIFY_ARBITRARY_THREADGROUP_PERMISSION); } } diff --git a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java index b94639414ffe5..69c6973f57cdf 100644 --- a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java @@ -14,7 +14,10 @@ import java.security.Permission; import java.security.Policy; import java.security.ProtectionDomain; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; /** Simple tests for SecureSM */ public class SecureSMTests extends TestCase { @@ -128,4 +131,12 @@ public void run() { t1.join(); assertTrue(interrupted1.get()); } + + public void testParallelStreamThreadGroup() throws Exception { + List list = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + list.add(i); + } + list.parallelStream().collect(Collectors.toSet()); + } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index 29e8aec00a02d..203105edc5a24 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -188,17 +188,16 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } } try (LongArray bucketOrdsToBuild = bigArrays().newLongArray(totalBucketsToBuild)) { - int builtBucketIndex = 0; + int[] builtBucketIndex = new int[] { 0 }; for (int ord = 0; ord < maxOrd; ord++) { if (bucketDocCount(ord) > 0) { - bucketOrdsToBuild.set(builtBucketIndex++, ord); + bucketOrdsToBuild.set(builtBucketIndex[0]++, ord); } } - assert builtBucketIndex == totalBucketsToBuild; - builtBucketIndex = 0; + assert builtBucketIndex[0] == totalBucketsToBuild; + builtBucketIndex[0] = 0; var bucketSubAggs = buildSubAggsForBuckets(bucketOrdsToBuild); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int owningBucketOrdIdx = 0; owningBucketOrdIdx < results.length; owningBucketOrdIdx++) { + InternalAggregation[] aggregations = buildAggregations(Math.toIntExact(owningBucketOrds.size()), owningBucketOrdIdx -> { List buckets = new ArrayList<>(filters.length); for (int i = 0; i < keys.length; i++) { long bucketOrd = bucketOrd(owningBucketOrds.get(owningBucketOrdIdx), i); @@ -207,10 +206,11 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw // a date-histogram where we will look for transactions over time and can expect many // empty buckets. 
if (docCount > 0) { + checkRealMemoryCBForInternalBucket(); InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( keys[i], docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } @@ -226,17 +226,17 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket( intersectKey, docCount, - bucketSubAggs.apply(builtBucketIndex++) + bucketSubAggs.apply(builtBucketIndex[0]++) ); buckets.add(bucket); } pos++; } } - results[owningBucketOrdIdx] = new InternalAdjacencyMatrix(name, buckets, metadata()); - } - assert builtBucketIndex == totalBucketsToBuild; - return results; + return new InternalAdjacencyMatrix(name, buckets, metadata()); + }); + assert builtBucketIndex[0] == totalBucketsToBuild; + return aggregations; } } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 6f36f1f17bf8b..824f009bc7d8e 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), key); builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -237,7 +235,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 42aa79f990fc6..edb7ec4cffce7 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -99,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, DocValueFormat format) throws IOException { String keyAsString = format.format(key).toString(); builder.startObject(); if (format != DocValueFormat.RAW) { @@ -110,7 +109,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); 
aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -597,7 +595,7 @@ private BucketReduceResult mergeConsecutiveBuckets( public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, format); } builder.endArray(); builder.field("interval", getInterval().toString()); diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index c4cdacd135cb4..d7590f2126325 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -36,24 +36,21 @@ public class InternalTimeSeries extends InternalMultiBucketAggregation buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalTimeSeries.InternalBucket(in, keyed)); + buckets.add(new InternalTimeSeries.InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -162,7 +156,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -252,14 +246,14 @@ public InternalTimeSeries create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { InternalTimeSeries.InternalBucket reduced = null; for (InternalTimeSeries.InternalBucket bucket : buckets) { if (reduced == null) { - reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); + reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations); } else { reduced.docCount += bucket.docCount; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index 1263d4282a18a..63472bca1d9ac 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -79,11 +79,11 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw while (ordsEnum.next()) { long docCount = bucketDocCount(ordsEnum.ord()); ordsEnum.readValue(spare); + checkRealMemoryCBForInternalBucket(); InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. 
docCount, - null, - keyed + null ); bucket.bucketOrd = ordsEnum.ord(); buckets.add(bucket); @@ -101,11 +101,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } buildSubAggsForAllBuckets(allBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(allBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(allBucketsPerOrd.get(ordIdx)); - } - return result; + return buildAggregations(Math.toIntExact(allBucketsPerOrd.size()), ordIdx -> buildResult(allBucketsPerOrd.get(ordIdx))); } } diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java index e61c02e0b9cd2..3b67d09c0d6a1 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java @@ -49,7 +49,7 @@ private List randomBuckets(boolean keyed, InternalAggregations a } try { var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef(); - bucketList.add(new InternalBucket(key, docCount, aggregations, keyed)); + bucketList.add(new InternalBucket(key, docCount, aggregations)); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -108,10 +108,10 @@ public void testReduceSimple() { InternalTimeSeries first = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY) ), false, Map.of() @@ -119,8 +119,8 @@ public void testReduceSimple() { InternalTimeSeries second = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY) ), false, Map.of() @@ -128,9 +128,9 @@ public void testReduceSimple() { InternalTimeSeries third = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY) ), false, Map.of() diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java 
b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java index d9a4023457126..493b4bdc81860 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java @@ -176,19 +176,19 @@ public void testMultiBucketAggregationAsSubAggregation() throws IOException { InternalDateHistogram byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=xxx}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=aaa, dim2=yyy}").docCount, equalTo(2L)); byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=yyy}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=bbb, dim2=zzz}").docCount, equalTo(4L)); byTimeStampBucket = ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 4, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 4, null, InternalAggregations.EMPTY)) ); }; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java new file mode 100644 index 0000000000000..fdc96892d4b27 --- /dev/null +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportActionIT.java @@ -0,0 +1,152 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.rollover.RolloverRequestBuilder; +import org.elasticsearch.action.admin.indices.template.put.TransportPutComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.CreateDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Template; +import org.elasticsearch.datastreams.DataStreamsPlugin; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.TaskManager; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xcontent.XContentType; + +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class ReindexDataStreamTransportActionIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(DataStreamsPlugin.class); + } + + public void testNonExistentDataStream() { + String nonExistentDataStreamName = randomAlphaOfLength(50); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(nonExistentDataStreamName); + assertThrows( + ResourceNotFoundException.class, + () -> client().execute(new ActionType(ReindexDataStreamAction.NAME), reindexDataStreamRequest) + .actionGet() + ); + } + + public void testAlreadyUpToDateDataStream() throws Exception { + String dataStreamName = randomAlphaOfLength(50).toLowerCase(Locale.ROOT); + ReindexDataStreamRequest reindexDataStreamRequest = new ReindexDataStreamRequest(dataStreamName); + createDataStream(dataStreamName); + ReindexDataStreamResponse response = client().execute( + new ActionType(ReindexDataStreamAction.NAME), + reindexDataStreamRequest + ).actionGet(); + String persistentTaskId = response.getTaskId(); + assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName)); + AtomicReference runningTask = new AtomicReference<>(); + for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { + TaskManager taskManager = transportService.getTaskManager(); + Map tasksMap = taskManager.getCancellableTasks(); + Optional> optionalTask = taskManager.getCancellableTasks() + .entrySet() + .stream() + .filter(entry -> entry.getValue().getType().equals("persistent")) + .filter( + entry -> entry.getValue() instanceof ReindexDataStreamTask + && persistentTaskId.equals((((ReindexDataStreamTask) entry.getValue()).getPersistentTaskId())) + ) + .findAny(); + 
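The task lookup just built can be read as a small reusable helper; the following is only an illustrative sketch that restates the test's stream filter using imports the test already has (java.util.Optional, TaskManager, ReindexDataStreamTask). The helper name findReindexTask is mine, not part of this change:

    // Illustrative only: locate the ReindexDataStreamTask that backs a given persistent task id
    // on one node's TaskManager, which is what the stream filter in the test does inline.
    private static Optional<ReindexDataStreamTask> findReindexTask(TaskManager taskManager, String persistentTaskId) {
        return taskManager.getCancellableTasks()
            .values()
            .stream()
            .filter(t -> t instanceof ReindexDataStreamTask)
            .map(t -> (ReindexDataStreamTask) t)
            .filter(t -> persistentTaskId.equals(t.getPersistentTaskId()))
            .findAny();
    }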
optionalTask.ifPresent( + longCancellableTaskEntry -> runningTask.compareAndSet(null, (ReindexDataStreamTask) longCancellableTaskEntry.getValue()) + ); + } + ReindexDataStreamTask task = runningTask.get(); + assertNotNull(task); + assertThat(task.getStatus().complete(), equalTo(true)); + assertNull(task.getStatus().exception()); + assertThat(task.getStatus().pending(), equalTo(0)); + assertThat(task.getStatus().inProgress(), equalTo(0)); + assertThat(task.getStatus().errors().size(), equalTo(0)); + } + + private void createDataStream(String dataStreamName) { + final TransportPutComposableIndexTemplateAction.Request putComposableTemplateRequest = + new TransportPutComposableIndexTemplateAction.Request("my-template"); + putComposableTemplateRequest.indexTemplate( + ComposableIndexTemplate.builder() + .indexPatterns(List.of(dataStreamName)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false)) + .template(Template.builder().build()) + .build() + ); + final AcknowledgedResponse putComposableTemplateResponse = safeGet( + client().execute(TransportPutComposableIndexTemplateAction.TYPE, putComposableTemplateRequest) + ); + assertThat(putComposableTemplateResponse.isAcknowledged(), is(true)); + + final CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request( + TEST_REQUEST_TIMEOUT, + TEST_REQUEST_TIMEOUT, + dataStreamName + ); + final AcknowledgedResponse createDataStreamResponse = safeGet( + client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest) + ); + assertThat(createDataStreamResponse.isAcknowledged(), is(true)); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + indexDocs(dataStreamName); + safeGet(new RolloverRequestBuilder(client()).setRolloverTarget(dataStreamName).lazy(false).execute()); + } + + private void indexDocs(String dataStreamName) { + int docs = randomIntBetween(5, 10); + CountDownLatch countDownLatch = new CountDownLatch(docs); + for (int i = 0; i < docs; i++) { + var indexRequest = new IndexRequest(dataStreamName).opType(DocWriteRequest.OpType.CREATE); + final String doc = "{ \"@timestamp\": \"2099-05-06T16:21:15.000Z\", \"message\": \"something cool happened\" }"; + indexRequest.source(doc, XContentType.JSON); + client().index(indexRequest, new ActionListener<>() { + @Override + public void onResponse(DocWriteResponse docWriteResponse) { + countDownLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + fail("Indexing request should have succeeded eventually, failed with " + e.getMessage()); + } + }); + } + safeAwait(countDownLatch); + } + +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index cb7445705537a..2f3b63d27ca35 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java @@ -19,19 +19,23 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.datastreams.ModifyDataStreamsAction; import org.elasticsearch.action.datastreams.PromoteDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; import org.elasticsearch.action.datastreams.lifecycle.ExplainDataStreamLifecycleAction; import 
org.elasticsearch.action.datastreams.lifecycle.GetDataStreamLifecycleAction; import org.elasticsearch.action.datastreams.lifecycle.PutDataStreamLifecycleAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.settings.SettingsModule; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.datastreams.action.CreateDataStreamTransportAction; @@ -40,6 +44,7 @@ import org.elasticsearch.datastreams.action.MigrateToDataStreamTransportAction; import org.elasticsearch.datastreams.action.ModifyDataStreamsTransportAction; import org.elasticsearch.datastreams.action.PromoteDataStreamTransportAction; +import org.elasticsearch.datastreams.action.ReindexDataStreamTransportAction; import org.elasticsearch.datastreams.action.TransportGetDataStreamsAction; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleErrorStore; import org.elasticsearch.datastreams.lifecycle.DataStreamLifecycleService; @@ -73,14 +78,27 @@ import org.elasticsearch.datastreams.rest.RestMigrateToDataStreamAction; import org.elasticsearch.datastreams.rest.RestModifyDataStreamsAction; import org.elasticsearch.datastreams.rest.RestPromoteDataStreamAction; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskExecutor; +import org.elasticsearch.datastreams.task.ReindexDataStreamPersistentTaskState; +import org.elasticsearch.datastreams.task.ReindexDataStreamStatus; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexSettingProvider; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksExecutor; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.HealthPlugin; +import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ParseField; import java.io.IOException; import java.time.Clock; @@ -93,7 +111,7 @@ import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; -public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin { +public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlugin, PersistentTaskPlugin { public static final Setting TIME_SERIES_POLL_INTERVAL = Setting.timeSetting( "time_series.poll_interval", @@ -244,6 +262,7 @@ public Collection 
createComponents(PluginServices services) { actions.add(new ActionHandler<>(PutDataStreamOptionsAction.INSTANCE, TransportPutDataStreamOptionsAction.class)); actions.add(new ActionHandler<>(DeleteDataStreamOptionsAction.INSTANCE, TransportDeleteDataStreamOptionsAction.class)); } + actions.add(new ActionHandler<>(ReindexDataStreamAction.INSTANCE, ReindexDataStreamTransportAction.class)); return actions; } @@ -302,4 +321,48 @@ public void close() throws IOException { public Collection getHealthIndicatorServices() { return List.of(dataStreamLifecycleHealthIndicatorService.get()); } + + @Override + public List getNamedXContent() { + return List.of( + new NamedXContentRegistry.Entry( + PersistentTaskState.class, + new ParseField(ReindexDataStreamPersistentTaskState.NAME), + ReindexDataStreamPersistentTaskState::fromXContent + ), + new NamedXContentRegistry.Entry( + PersistentTaskParams.class, + new ParseField(ReindexDataStreamTaskParams.NAME), + ReindexDataStreamTaskParams::fromXContent + ) + ); + } + + @Override + public List getNamedWriteables() { + return List.of( + new NamedWriteableRegistry.Entry( + PersistentTaskState.class, + ReindexDataStreamPersistentTaskState.NAME, + ReindexDataStreamPersistentTaskState::new + ), + new NamedWriteableRegistry.Entry( + PersistentTaskParams.class, + ReindexDataStreamTaskParams.NAME, + ReindexDataStreamTaskParams::new + ), + new NamedWriteableRegistry.Entry(Task.Status.class, ReindexDataStreamStatus.NAME, ReindexDataStreamStatus::new) + ); + } + + @Override + public List> getPersistentTasksExecutor( + ClusterService clusterService, + ThreadPool threadPool, + Client client, + SettingsModule settingsModule, + IndexNameExpressionResolver expressionResolver + ) { + return List.of(new ReindexDataStreamPersistentTaskExecutor(client, clusterService, ReindexDataStreamTask.TASK_NAME, threadPool)); + } } diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java new file mode 100644 index 0000000000000..0a86985c6c7b2 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/ReindexDataStreamTransportAction.java @@ -0,0 +1,93 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.action; + +import org.elasticsearch.ResourceAlreadyExistsException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamRequest; +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.datastreams.task.ReindexDataStreamTask; +import org.elasticsearch.datastreams.task.ReindexDataStreamTaskParams; +import org.elasticsearch.injection.guice.Inject; +import org.elasticsearch.persistent.PersistentTasksService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +/* + * This transport action creates a new persistent task for reindexing the source data stream given in the request. On successful creation + * of the persistent task, it responds with the persistent task id so that the user can monitor the persistent task. + */ +public class ReindexDataStreamTransportAction extends HandledTransportAction { + private final PersistentTasksService persistentTasksService; + private final TransportService transportService; + private final ClusterService clusterService; + + @Inject + public ReindexDataStreamTransportAction( + TransportService transportService, + ActionFilters actionFilters, + PersistentTasksService persistentTasksService, + ClusterService clusterService + ) { + super( + ReindexDataStreamAction.NAME, + true, + transportService, + actionFilters, + ReindexDataStreamRequest::new, + transportService.getThreadPool().executor(ThreadPool.Names.GENERIC) + ); + this.transportService = transportService; + this.persistentTasksService = persistentTasksService; + this.clusterService = clusterService; + } + + @Override + protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener listener) { + String sourceDataStreamName = request.getSourceDataStream(); + Metadata metadata = clusterService.state().metadata(); + DataStream dataStream = metadata.dataStreams().get(sourceDataStreamName); + if (dataStream == null) { + listener.onFailure(new ResourceNotFoundException("Data stream named [{}] does not exist", sourceDataStreamName)); + return; + } + int totalIndices = dataStream.getIndices().size(); + int totalIndicesToBeUpgraded = (int) dataStream.getIndices() + .stream() + .filter(index -> metadata.index(index).getCreationVersion().isLegacyIndexVersion()) + .count(); + ReindexDataStreamTaskParams params = new ReindexDataStreamTaskParams( + sourceDataStreamName, + transportService.getThreadPool().absoluteTimeInMillis(), + totalIndices, + totalIndicesToBeUpgraded + ); + String persistentTaskId = getPersistentTaskId(sourceDataStreamName); + persistentTasksService.sendStartRequest( + persistentTaskId, + ReindexDataStreamTask.TASK_NAME, + params, + null, + ActionListener.wrap(startedTask -> listener.onResponse(new ReindexDataStreamResponse(persistentTaskId)), listener::onFailure) + ); + } + + private String getPersistentTaskId(String dataStreamName) throws ResourceAlreadyExistsException { + return 
"reindex-data-stream-" + dataStreamName; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java new file mode 100644 index 0000000000000..f10d2e7b356fb --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskExecutor.java @@ -0,0 +1,121 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.datastreams.GetDataStreamAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.Index; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.persistent.PersistentTasksExecutor; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamPersistentTaskExecutor extends PersistentTasksExecutor { + private static final TimeValue TASK_KEEP_ALIVE_TIME = TimeValue.timeValueDays(1); + private final Client client; + private final ClusterService clusterService; + private final ThreadPool threadPool; + + public ReindexDataStreamPersistentTaskExecutor(Client client, ClusterService clusterService, String taskName, ThreadPool threadPool) { + super(taskName, threadPool.generic()); + this.client = client; + this.clusterService = clusterService; + this.threadPool = threadPool; + } + + @Override + protected ReindexDataStreamTask createTask( + long id, + String type, + String action, + TaskId parentTaskId, + PersistentTasksCustomMetadata.PersistentTask taskInProgress, + Map headers + ) { + ReindexDataStreamTaskParams params = taskInProgress.getParams(); + return new ReindexDataStreamTask( + params.startTime(), + params.totalIndices(), + params.totalIndicesToBeUpgraded(), + threadPool, + id, + type, + action, + "id=" + taskInProgress.getId(), + parentTaskId, + headers + ); + } + + @Override + protected void nodeOperation(AllocatedPersistentTask task, ReindexDataStreamTaskParams params, PersistentTaskState state) { + String sourceDataStream = params.getSourceDataStream(); + GetDataStreamAction.Request request = new GetDataStreamAction.Request(TimeValue.MAX_VALUE, new String[] { sourceDataStream }); + assert task instanceof ReindexDataStreamTask; + final ReindexDataStreamTask reindexDataStreamTask = (ReindexDataStreamTask) task; + client.execute(GetDataStreamAction.INSTANCE, request, ActionListener.wrap(response -> { + List dataStreamInfos = response.getDataStreams(); + if (dataStreamInfos.size() == 1) { + List indices = dataStreamInfos.getFirst().getDataStream().getIndices(); 
+ List indicesToBeReindexed = indices.stream() + .filter(index -> clusterService.state().getMetadata().index(index).getCreationVersion().isLegacyIndexVersion()) + .toList(); + reindexDataStreamTask.setPendingIndices(indicesToBeReindexed.stream().map(Index::getName).toList()); + for (Index index : indicesToBeReindexed) { + // TODO This is just a placeholder. This is where the real data stream reindex logic will go + } + + completeSuccessfulPersistentTask(reindexDataStreamTask); + } else { + completeFailedPersistentTask(reindexDataStreamTask, new ElasticsearchException("data stream does not exist")); + } + }, reindexDataStreamTask::markAsFailed)); + } + + private void completeSuccessfulPersistentTask(ReindexDataStreamTask persistentTask) { + persistentTask.reindexSucceeded(); + threadPool.schedule(persistentTask::markAsCompleted, getTimeToLive(persistentTask), threadPool.generic()); + } + + private void completeFailedPersistentTask(ReindexDataStreamTask persistentTask, Exception e) { + persistentTask.reindexFailed(e); + threadPool.schedule(() -> persistentTask.markAsFailed(e), getTimeToLive(persistentTask), threadPool.generic()); + } + + private TimeValue getTimeToLive(ReindexDataStreamTask reindexDataStreamTask) { + PersistentTasksCustomMetadata persistentTasksCustomMetadata = clusterService.state() + .getMetadata() + .custom(PersistentTasksCustomMetadata.TYPE); + PersistentTasksCustomMetadata.PersistentTask persistentTask = persistentTasksCustomMetadata.getTask( + reindexDataStreamTask.getPersistentTaskId() + ); + PersistentTaskState state = persistentTask.getState(); + final long completionTime; + if (state == null) { + completionTime = threadPool.absoluteTimeInMillis(); + reindexDataStreamTask.updatePersistentTaskState( + new ReindexDataStreamPersistentTaskState(completionTime), + ActionListener.noop() + ); + } else { + completionTime = ((ReindexDataStreamPersistentTaskState) state).completionTime(); + } + return TimeValue.timeValueMillis(TASK_KEEP_ALIVE_TIME.millis() - (threadPool.absoluteTimeInMillis() - completionTime)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java new file mode 100644 index 0000000000000..d6f32a3d34a7a --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskState.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskState; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamPersistentTaskState(long completionTime) implements Task.Status, PersistentTaskState { + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String COMPLETION_TIME_FIELD = "completion_time"; + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamPersistentTaskState((long) args[0]) + ); + static { + PARSER.declareLong(constructorArg(), new ParseField(COMPLETION_TIME_FIELD)); + } + + public ReindexDataStreamPersistentTaskState(StreamInput in) throws IOException { + this(in.readLong()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(completionTime); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(COMPLETION_TIME_FIELD, completionTime); + builder.endObject(); + return builder; + } + + public static ReindexDataStreamPersistentTaskState fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java new file mode 100644 index 0000000000000..10dfded853a13 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatus.java @@ -0,0 +1,95 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +public record ReindexDataStreamStatus( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + boolean complete, + Exception exception, + int inProgress, + int pending, + List> errors +) implements Task.Status { + public ReindexDataStreamStatus { + Objects.requireNonNull(errors); + } + + public static final String NAME = "ReindexDataStreamStatus"; + + public ReindexDataStreamStatus(StreamInput in) throws IOException { + this( + in.readLong(), + in.readInt(), + in.readInt(), + in.readBoolean(), + in.readException(), + in.readInt(), + in.readInt(), + in.readCollectionAsList(in1 -> Tuple.tuple(in1.readString(), in1.readException())) + ); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(persistentTaskStartTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + out.writeBoolean(complete); + out.writeException(exception); + out.writeInt(inProgress); + out.writeInt(pending); + out.writeCollection(errors, (out1, tuple) -> { + out1.writeString(tuple.v1()); + out1.writeException(tuple.v2()); + }); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("start_time", persistentTaskStartTime); + builder.field("complete", complete); + builder.field("total_indices", totalIndices); + builder.field("total_indices_requiring_upgrade", totalIndicesToBeUpgraded); + builder.field("successes", totalIndicesToBeUpgraded - (inProgress + pending + errors.size())); + builder.field("in_progress", inProgress); + builder.field("pending", pending); + builder.startArray("errors"); + for (Tuple error : errors) { + builder.startObject(); + builder.field("index", error.v1()); + builder.field("message", error.v2().getMessage()); + builder.endObject(); + } + builder.endArray(); + if (exception != null) { + builder.field("exception", exception.getMessage()); + } + builder.endObject(); + return builder; + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java new file mode 100644 index 0000000000000..2ae244679659f --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTask.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.core.Tuple; +import org.elasticsearch.persistent.AllocatedPersistentTask; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class ReindexDataStreamTask extends AllocatedPersistentTask { + public static final String TASK_NAME = "reindex-data-stream"; + private final long persistentTaskStartTime; + private final int totalIndices; + private final int totalIndicesToBeUpgraded; + private final ThreadPool threadPool; + private boolean complete = false; + private Exception exception; + private List inProgress = new ArrayList<>(); + private List pending = List.of(); + private List> errors = new ArrayList<>(); + + public ReindexDataStreamTask( + long persistentTaskStartTime, + int totalIndices, + int totalIndicesToBeUpgraded, + ThreadPool threadPool, + long id, + String type, + String action, + String description, + TaskId parentTask, + Map headers + ) { + super(id, type, action, description, parentTask, headers); + this.persistentTaskStartTime = persistentTaskStartTime; + this.totalIndices = totalIndices; + this.totalIndicesToBeUpgraded = totalIndicesToBeUpgraded; + this.threadPool = threadPool; + } + + @Override + public ReindexDataStreamStatus getStatus() { + return new ReindexDataStreamStatus( + persistentTaskStartTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress.size(), + pending.size(), + errors + ); + } + + public void reindexSucceeded() { + this.complete = true; + } + + public void reindexFailed(Exception e) { + this.complete = true; + this.exception = e; + } + + public void setInProgressIndices(List inProgressIndices) { + this.inProgress = inProgressIndices; + } + + public void setPendingIndices(List pendingIndices) { + this.pending = pendingIndices; + } + + public void addErrorIndex(String index, Exception error) { + this.errors.add(Tuple.tuple(index, error)); + } +} diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java new file mode 100644 index 0000000000000..5efbc6b672216 --- /dev/null +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParams.java @@ -0,0 +1,86 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.persistent.PersistentTaskParams; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + +public record ReindexDataStreamTaskParams(String sourceDataStream, long startTime, int totalIndices, int totalIndicesToBeUpgraded) + implements + PersistentTaskParams { + + public static final String NAME = ReindexDataStreamTask.TASK_NAME; + private static final String SOURCE_DATA_STREAM_FIELD = "source_data_stream"; + private static final String START_TIME_FIELD = "start_time"; + private static final String TOTAL_INDICES_FIELD = "total_indices"; + private static final String TOTAL_INDICES_TO_BE_UPGRADED_FIELD = "total_indices_to_be_upgraded"; + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + NAME, + true, + args -> new ReindexDataStreamTaskParams((String) args[0], (long) args[1], (int) args[2], (int) args[3]) + ); + static { + PARSER.declareString(constructorArg(), new ParseField(SOURCE_DATA_STREAM_FIELD)); + PARSER.declareLong(constructorArg(), new ParseField(START_TIME_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_FIELD)); + PARSER.declareInt(constructorArg(), new ParseField(TOTAL_INDICES_TO_BE_UPGRADED_FIELD)); + } + + public ReindexDataStreamTaskParams(StreamInput in) throws IOException { + this(in.readString(), in.readLong(), in.readInt(), in.readInt()); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.REINDEX_DATA_STREAMS; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(sourceDataStream); + out.writeLong(startTime); + out.writeInt(totalIndices); + out.writeInt(totalIndicesToBeUpgraded); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field(SOURCE_DATA_STREAM_FIELD, sourceDataStream) + .field(START_TIME_FIELD, startTime) + .field(TOTAL_INDICES_FIELD, totalIndices) + .field(TOTAL_INDICES_TO_BE_UPGRADED_FIELD, totalIndicesToBeUpgraded) + .endObject(); + } + + public String getSourceDataStream() { + return sourceDataStream; + } + + public static ReindexDataStreamTaskParams fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java new file mode 100644 index 0000000000000..be11bff131909 --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamPersistentTaskStateTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class ReindexDataStreamPersistentTaskStateTests extends AbstractXContentSerializingTestCase { + @Override + protected ReindexDataStreamPersistentTaskState doParseInstance(XContentParser parser) throws IOException { + return ReindexDataStreamPersistentTaskState.fromXContent(parser); + } + + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamPersistentTaskState::new; + } + + @Override + protected ReindexDataStreamPersistentTaskState createTestInstance() { + return new ReindexDataStreamPersistentTaskState(randomNegativeLong()); + } + + @Override + protected ReindexDataStreamPersistentTaskState mutateInstance(ReindexDataStreamPersistentTaskState instance) throws IOException { + return new ReindexDataStreamPersistentTaskState(instance.completionTime() + 1); + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java new file mode 100644 index 0000000000000..8f0fabc2ce7ee --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamStatusTests.java @@ -0,0 +1,157 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +import static java.util.Map.entry; +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamStatusTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamStatus::new; + } + + @Override + protected ReindexDataStreamStatus createTestInstance() { + return new ReindexDataStreamStatus( + randomLong(), + randomNegativeInt(), + randomNegativeInt(), + randomBoolean(), + nullableTestException(), + randomNegativeInt(), + randomNegativeInt(), + randomErrorList() + ); + } + + private Exception nullableTestException() { + if (randomBoolean()) { + return testException(); + } + return null; + } + + private Exception testException() { + /* + * Unfortunately ElasticsearchException doesn't have an equals and just falls back to Object::equals. So we can't test for equality + * when we're using an exception. So always just use null. + */ + return null; + } + + private List randomList() { + return randomList(0); + } + + private List randomList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> randomAlphaOfLength(50)); + } + + private List> randomErrorList() { + return randomErrorList(0); + } + + private List> randomErrorList(int minSize) { + return randomList(minSize, Math.max(minSize, 100), () -> Tuple.tuple(randomAlphaOfLength(30), testException())); + } + + @Override + protected ReindexDataStreamStatus mutateInstance(ReindexDataStreamStatus instance) throws IOException { + long startTime = instance.persistentTaskStartTime(); + int totalIndices = instance.totalIndices(); + int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded(); + boolean complete = instance.complete(); + Exception exception = instance.exception(); + int inProgress = instance.inProgress(); + int pending = instance.pending(); + List> errors = instance.errors(); + switch (randomIntBetween(0, 6)) { + case 0 -> startTime = randomLong(); + case 1 -> totalIndices = totalIndices + 1; + case 2 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1; + case 3 -> complete = complete == false; + case 4 -> inProgress = inProgress + 1; + case 5 -> pending = pending + 1; + case 6 -> errors = randomErrorList(errors.size() + 1); + default -> throw new UnsupportedOperationException(); + } + return new ReindexDataStreamStatus( + startTime, + totalIndices, + totalIndicesToBeUpgraded, + complete, + exception, + inProgress, + pending, + errors + ); + } + + public void testToXContent() throws IOException { + ReindexDataStreamStatus status = new ReindexDataStreamStatus( + 1234L, + 200, + 100, + true, + new ElasticsearchException("the whole task failed"), + 12, + 8, + List.of( + Tuple.tuple("index7", new ElasticsearchException("index7 failed")), + Tuple.tuple("index8", new ElasticsearchException("index8 " + "failed")) + ) + ); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + 
builder.humanReadable(true); + status.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + Map<String, Object> parserMap = parser.map(); + assertThat( + parserMap, + equalTo( + Map.ofEntries( + entry("start_time", 1234), + entry("total_indices", 200), + entry("total_indices_requiring_upgrade", 100), + entry("complete", true), + entry("exception", "the whole task failed"), + entry("successes", 78), + entry("in_progress", 12), + entry("pending", 8), + entry( + "errors", + List.of( + Map.of("index", "index7", "message", "index7 failed"), + Map.of("index", "index8", "message", "index8 failed") + ) + ) + ) + ) + ); + } + } + } +} diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java new file mode 100644 index 0000000000000..55098bf4a68d5 --- /dev/null +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/task/ReindexDataStreamTaskParamsTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.datastreams.task; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamTaskParamsTests extends AbstractXContentSerializingTestCase<ReindexDataStreamTaskParams> { + + @Override + protected Writeable.Reader<ReindexDataStreamTaskParams> instanceReader() { + return ReindexDataStreamTaskParams::new; + } + + @Override + protected ReindexDataStreamTaskParams createTestInstance() { + return new ReindexDataStreamTaskParams(randomAlphaOfLength(50), randomLong(), randomNonNegativeInt(), randomNonNegativeInt()); + } + + @Override + protected ReindexDataStreamTaskParams mutateInstance(ReindexDataStreamTaskParams instance) { + String sourceDataStream = instance.sourceDataStream(); + long startTime = instance.startTime(); + int totalIndices = instance.totalIndices(); + int totalIndicesToBeUpgraded = instance.totalIndicesToBeUpgraded(); + switch (randomIntBetween(0, 3)) { + case 0 -> sourceDataStream = randomAlphaOfLength(50); + case 1 -> startTime = randomLong(); + case 2 -> totalIndices = totalIndices + 1; + case 3 -> totalIndicesToBeUpgraded = totalIndicesToBeUpgraded + 1; + default -> throw new UnsupportedOperationException(); + } + return new ReindexDataStreamTaskParams(sourceDataStream, startTime, totalIndices, totalIndicesToBeUpgraded); + } + + @Override + protected ReindexDataStreamTaskParams doParseInstance(XContentParser parser) { + return ReindexDataStreamTaskParams.fromXContent(parser); + } + + public void testToXContent() throws IOException { + ReindexDataStreamTaskParams
params = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + params.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + Map parserMap = parser.map(); + assertThat(parserMap.get("source_data_stream"), equalTo(params.sourceDataStream())); + assertThat(((Number) parserMap.get("start_time")).longValue(), equalTo(params.startTime())); + } + } + } +} diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt index e76db7cfb1d26..5a1d8c002aa17 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/org.elasticsearch.script.score.txt @@ -50,5 +50,7 @@ static_import { double cosineSimilarity(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$CosineSimilarity double dotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$DotProduct double hamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.VectorScoreScriptUtils$Hamming + double maxSimDotProduct(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimDotProduct + double maxSimInvHamming(org.elasticsearch.script.ScoreScript, Object, String) bound_to org.elasticsearch.script.MultiVectorScoreScriptUtils$MaxSimInvHamming } diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml new file mode 100644 index 0000000000000..caa7c59ab4c42 --- /dev/null +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/141_multi_dense_vector_max_sim.yml @@ -0,0 +1,206 @@ +setup: + - requires: + capabilities: + - method: POST + path: /_search + capabilities: [ multi_dense_vector_script_max_sim ] + test_runner_features: capabilities + reason: "Support for multi dense vector max-sim functions capability required" + - skip: + features: headers + + - do: + indices.create: + index: test-index + body: + settings: + number_of_shards: 1 + mappings: + properties: + vector: + type: multi_dense_vector + dims: 5 + byte_vector: + type: multi_dense_vector + dims: 5 + element_type: byte + bit_vector: + type: multi_dense_vector + dims: 40 + element_type: bit + - do: + index: + index: test-index + id: "1" + body: + vector: [[230.0, 300.33, -34.8988, 15.555, -200.0], [-0.5, 100.0, -13, 14.8, -156.0]] + byte_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + bit_vector: [[8, 5, -15, 1, -7], [-1, 115, -3, 4, -128]] + + - do: + index: + index: test-index + id: "3" + body: + vector: [[0.5, 111.3, -13.0, 14.8, -156.0]] + byte_vector: [[2, 18, -5, 0, -124]] + bit_vector: [[2, 18, -5, 0, -124]] + + - do: + indices.refresh: {} +--- +"Test max-sim dot product scoring": + - skip: + features: close_to + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: 
"maxSimDotProduct(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 611.316, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 68.90001, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 230, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 33, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 0]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 3, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 2, error: 0.01}} + +# doing max-sim dot product with a vector where the stored bit vectors are used as masks + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimDotProduct(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]] + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "1"} + - close_to: {hits.hits.0._score: {value: 190, error: 0.01}} + + - match: {hits.hits.1._id: "3"} + - close_to: {hits.hits.1._score: {value: 125, error: 0.01}} +--- +"Test max-sim inv hamming scoring": + - skip: + features: close_to + + # inv hamming doesn't apply to float vectors + - do: + catch: bad_request + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'byte_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} + + - do: + headers: + Content-Type: application/json + search: + rest_total_hits_as_int: true + body: + query: + script_score: + query: {match_all: {} } + script: + source: "maxSimInvHamming(params.query_vector, 'bit_vector')" + params: + query_vector: [[1, 2, 1, 1, 1]] + + - match: {hits.total: 2} + + - match: {hits.hits.0._id: "3"} + - close_to: {hits.hits.0._score: {value: 0.675, error: 0.01}} + + - match: {hits.hits.1._id: "1"} + - close_to: {hits.hits.1._score: {value: 0.65, error: 0.01}} diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java 
b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 3905edae46c2f..a4aa0514bb47a 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -208,7 +208,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -216,7 +216,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -224,7 +224,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -232,7 +232,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 1ee5519593569..3442e9dc43925 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -218,7 +218,7 @@ public void testMultipleFeatureMigration() throws Exception { // Finally, verify that all the indices exist and have the properties we expect. 
assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -226,7 +226,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -234,7 +234,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -242,7 +242,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, @@ -251,7 +251,7 @@ public void testMultipleFeatureMigration() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".second-int-man-old-reindexed-for-8", + ".second-int-man-old-reindexed-for-9", SECOND_FEATURE_IDX_FLAG_VALUE, true, true, diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index e049d4cd372e6..61940be247861 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -112,7 +112,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numThrottles + 1) .withThrottles(numThrottles) .withExceptions(numThrottles) @@ -137,7 +137,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics() .withRequests(1) .withThrottles(0) .withExceptions(1) @@ -170,7 +170,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numErrors + 1) .withThrottles(throttles.get()) .withExceptions(numErrors) @@ -191,7 +191,7 @@ public void testRequestFailuresAreCountedInMetrics() { assertThrows(IOException.class, () -> blobContainer.listBlobs(purpose)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, 
repository).expectMetrics() .withRequests(4) .withThrottles(0) .withExceptions(4) @@ -322,14 +322,20 @@ private void clearMetrics(String discoveryNode) { .forEach(TestTelemetryPlugin::resetMeter); } - private MetricsAsserter metricsAsserter(String dataNodeName, OperationPurpose operationPurpose, AzureBlobStore.Operation operation) { - return new MetricsAsserter(dataNodeName, operationPurpose, operation); + private MetricsAsserter metricsAsserter( + String dataNodeName, + OperationPurpose operationPurpose, + AzureBlobStore.Operation operation, + String repository + ) { + return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository); } private class MetricsAsserter { private final String dataNodeName; private final OperationPurpose purpose; private final AzureBlobStore.Operation operation; + private final String repository; enum Result { Success, @@ -355,10 +361,11 @@ List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, Strin abstract List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name); } - private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation) { + private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) { this.dataNodeName = dataNodeName; this.purpose = purpose; this.operation = operation; + this.repository = repository; } private class Expectations { @@ -451,6 +458,7 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa .filter( m -> m.attributes().get("operation").equals(operation.getKey()) && m.attributes().get("purpose").equals(purpose.getKey()) + && m.attributes().get("repo_name").equals(repository) && m.attributes().get("repo_type").equals("azure") ) .findFirst() @@ -462,6 +470,8 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa + operation.getKey() + " and purpose=" + purpose.getKey() + + " and repo_name=" + + repository + " in " + measurements ) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index ab3f3ee4f3728..bd21f208faac4 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -402,7 +402,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation")); final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey( operation, diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index d9480abf21687..bb8a452e21771 100644 --- 
a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -300,7 +300,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation")); final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey( operation, diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 7407522651e55..da357dc09ab95 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -327,6 +327,8 @@ private Map metricAttributes(String action) { return Map.of( "repo_type", S3Repository.TYPE, + "repo_name", + blobStore.getRepositoryMetadata().name(), "operation", Operation.GET_OBJECT.getKey(), "purpose", diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index ac49cffc1e0da..b292dc5872994 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -1106,7 +1106,7 @@ private List getRetryHistogramMeasurements() { } private Map metricAttributes(String action) { - return Map.of("repo_type", "s3", "operation", "GetObject", "purpose", "Indices", "action", action); + return Map.of("repo_type", "s3", "repo_name", "repository", "operation", "GetObject", "purpose", "Indices", "action", action); } /** diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 3095139ca4685..4bb27af4bd0f5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -699,11 +699,6 @@ public Collection getRestHandlers( Predicate clusterSupportsFeature ) { return List.of(new BaseRestHandler() { - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public String getName() { return ROUTE; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java new file mode 100644 index 0000000000000..18c91068ff4f9 --- /dev/null +++ 
b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.HttpResponseStatus; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class Netty4TrashingAllocatorIT extends ESNetty4IntegTestCase { + + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.concatLists(List.of(Handler.class), super.nodePlugins()); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testTrashContent() throws InterruptedException { + try (var client = new Netty4HttpClient()) { + var addr = randomFrom(internalCluster().getInstance(HttpServerTransport.class).boundAddress().boundAddresses()).address(); + var content = randomAlphaOfLength(between(1024, 2048)); + var responses = client.post(addr, List.of(new Tuple<>(Handler.ROUTE, content))); + assertEquals(HttpResponseStatus.OK, responses.stream().findFirst().get().status()); + } + } + + public static class Handler extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/trashing-alloc"; + + @Override + public Collection<RestHandler> getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier<DiscoveryNodes> nodesInCluster, + Predicate<NodeFeature> clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return ROUTE; + } + + @Override + public List<Route> routes() { + return List.of(new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected RestChannelConsumer
prepareRequest(RestRequest request, NodeClient client) throws IOException { + var content = request.releasableContent(); + var iter = content.iterator(); + return (chan) -> { + request.getHttpRequest().release(); + assertFalse(content.hasReferences()); + BytesRef br; + while ((br = iter.next()) != null) { + for (int i = br.offset; i < br.offset + br.length; i++) { + if (br.bytes[i] != 0) { + fail( + new AssertionError( + "buffer is not trashed, off=" + + br.offset + + " len=" + + br.length + + " pos=" + + i + + " ind=" + + (i - br.offset) + ) + ); + } + } + } + chan.sendResponse(new RestResponse(RestStatus.OK, "")); + }; + } + }); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index a1aa211814520..2662ddf7e1440 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.http.netty4; -import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.EmptyHttpHeaders; @@ -128,39 +127,6 @@ public void release() { } } - @Override - public HttpRequest releaseAndCopy() { - assert released.get() == false; - if (pooled == false) { - return this; - } - try { - final ByteBuf copiedContent = Unpooled.copiedBuffer(request.content()); - HttpBody newContent; - if (content.isStream()) { - newContent = content; - } else { - newContent = Netty4Utils.fullHttpBodyFrom(copiedContent); - } - return new Netty4HttpRequest( - sequence, - new DefaultFullHttpRequest( - request.protocolVersion(), - request.method(), - request.uri(), - copiedContent, - request.headers(), - request.trailingHeaders() - ), - new AtomicBoolean(false), - false, - newContent - ); - } finally { - release(); - } - } - @Override public final Map> getHeaders() { return headers; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 459b6c77be8c3..81b4fd3fbb9ee 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -179,7 +179,7 @@ public boolean hasReferences() { } public static HttpBody.Full fullHttpBodyFrom(final ByteBuf buf) { - return new HttpBody.ByteRefHttpBody(toBytesReference(buf)); + return new HttpBody.ByteRefHttpBody(toReleasableBytesReference(buf)); } public static Recycler createRecycler(Settings settings) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index ab38b5f0c4c8c..1eb7e13889338 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -24,9 +24,11 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Booleans; import 
org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; public class NettyAllocator { @@ -44,8 +46,9 @@ public class NettyAllocator { private static final String USE_NETTY_DEFAULT_CHUNK = "es.unsafe.use_netty_default_chunk_and_page_size"; static { + ByteBufAllocator allocator; if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) { - ALLOCATOR = ByteBufAllocator.DEFAULT; + allocator = ByteBufAllocator.DEFAULT; SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + ByteSizeValue.ofBytes(SUGGESTED_MAX_ALLOCATION_SIZE) @@ -127,7 +130,12 @@ public class NettyAllocator { + g1gcRegionSize + "}]"; } - ALLOCATOR = new NoDirectBuffers(delegate); + allocator = new NoDirectBuffers(delegate); + } + if (Assertions.ENABLED) { + ALLOCATOR = new TrashingByteBufAllocator(allocator); + } else { + ALLOCATOR = allocator; } RECYCLER = new Recycler<>() { @@ -353,4 +361,105 @@ public ByteBufAllocator getDelegate() { return delegate; } } + + static class TrashingByteBuf extends WrappedByteBuf { + + private boolean trashed = false; + + protected TrashingByteBuf(ByteBuf buf) { + super(buf); + } + + @Override + public boolean release() { + if (refCnt() == 1) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(); + } + + @Override + public boolean release(int decrement) { + if (refCnt() == decrement && refCnt() > 0) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(decrement); + } + + // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref + // count to zero, which looks racy because in principle a concurrent caller could come along + // and successfully retain() this buffer to keep it alive after it's been trashed. Such a + // caller would sometimes get an IllegalReferenceCountException ofc but that's something it + // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. + // Yet in practice this should never happen, we only ever retain() these buffers while we + // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd + // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose + // contents we've already decided to trash. 
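For illustration, a minimal standalone sketch of the trashing idea the note above describes, assuming only Netty's ByteBuf API and the JDK on the classpath; the TrashingSketch/trash names are hypothetical and not part of this change. The sketch zeroes every heap-backed region a buffer exposes before the final release, so a use-after-release bug observes zeros instead of stale request bytes.

    import io.netty.buffer.ByteBuf;
    import io.netty.buffer.Unpooled;

    import java.nio.ByteBuffer;
    import java.util.Arrays;

    public class TrashingSketch {
        // Zero every heap-backed region the buffer exposes; direct buffers are left alone in this sketch.
        static void trash(ByteBuf buf) {
            for (ByteBuffer nioBuf : buf.nioBuffers()) {
                if (nioBuf.hasArray()) {
                    int from = nioBuf.arrayOffset() + nioBuf.position();
                    Arrays.fill(nioBuf.array(), from, from + nioBuf.remaining(), (byte) 0);
                }
            }
        }

        public static void main(String[] args) {
            byte[] backing = { 1, 2, 3, 4 };
            ByteBuf buf = Unpooled.wrappedBuffer(backing);
            trash(buf);    // wipe the content first...
            buf.release(); // ...then drop the last reference, mirroring TrashingByteBuf#release
            System.out.println(Arrays.toString(backing)); // prints [0, 0, 0, 0]
        }
    }

In the allocator below, the equivalent zeroing (TrashingByteBufAllocator.trashBuffer) runs inside release() just before the reference count reaches zero, which is the ordering the note above is justifying.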
+ private void trashContent() { + if (trashed == false) { + trashed = true; + TrashingByteBufAllocator.trashBuffer(buf); + } + } + } + + static class TrashingCompositeByteBuf extends CompositeByteBuf { + + TrashingCompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { + super(alloc, direct, maxNumComponents); + } + + @Override + protected void deallocate() { + TrashingByteBufAllocator.trashBuffer(this); + super.deallocate(); + } + } + + static class TrashingByteBufAllocator extends NoDirectBuffers { + + static int DEFAULT_MAX_COMPONENTS = 16; + + static void trashBuffer(ByteBuf buf) { + for (var nioBuf : buf.nioBuffers()) { + if (nioBuf.hasArray()) { + var from = nioBuf.arrayOffset() + nioBuf.position(); + var to = from + nioBuf.remaining(); + Arrays.fill(nioBuf.array(), from, to, (byte) 0); + } + } + } + + TrashingByteBufAllocator(ByteBufAllocator delegate) { + super(delegate); + } + + @Override + public ByteBuf heapBuffer() { + return new TrashingByteBuf(super.heapBuffer()); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity)); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity, int maxCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity, maxCapacity)); + } + + @Override + public CompositeByteBuf compositeHeapBuffer() { + return new TrashingCompositeByteBuf(this, false, DEFAULT_MAX_COMPONENTS); + } + + @Override + public CompositeByteBuf compositeHeapBuffer(int maxNumComponents) { + return new TrashingCompositeByteBuf(this, false, maxNumComponents); + } + + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java new file mode 100644 index 0000000000000..50841cec000f1 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java @@ -0,0 +1,1036 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.util.ByteProcessor; +import io.netty.util.internal.ObjectUtil; +import io.netty.util.internal.StringUtil; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.FileChannel; +import java.nio.channels.GatheringByteChannel; +import java.nio.channels.ScatteringByteChannel; +import java.nio.charset.Charset; + +/** + * A copy of Netty's WrappedByteBuf. 
+ */ +class WrappedByteBuf extends ByteBuf { + + protected final ByteBuf buf; + + protected WrappedByteBuf(ByteBuf buf) { + this.buf = ObjectUtil.checkNotNull(buf, "buf"); + } + + @Override + public final boolean hasMemoryAddress() { + return buf.hasMemoryAddress(); + } + + @Override + public boolean isContiguous() { + return buf.isContiguous(); + } + + @Override + public final long memoryAddress() { + return buf.memoryAddress(); + } + + @Override + public final int capacity() { + return buf.capacity(); + } + + @Override + public ByteBuf capacity(int newCapacity) { + buf.capacity(newCapacity); + return this; + } + + @Override + public final int maxCapacity() { + return buf.maxCapacity(); + } + + @Override + public final ByteBufAllocator alloc() { + return buf.alloc(); + } + + @Override + public final ByteOrder order() { + return buf.order(); + } + + @Override + public ByteBuf order(ByteOrder endianness) { + return buf.order(endianness); + } + + @Override + public final ByteBuf unwrap() { + return buf; + } + + @Override + public ByteBuf asReadOnly() { + return buf.asReadOnly(); + } + + @Override + public boolean isReadOnly() { + return buf.isReadOnly(); + } + + @Override + public final boolean isDirect() { + return buf.isDirect(); + } + + @Override + public final int readerIndex() { + return buf.readerIndex(); + } + + @Override + public final ByteBuf readerIndex(int readerIndex) { + buf.readerIndex(readerIndex); + return this; + } + + @Override + public final int writerIndex() { + return buf.writerIndex(); + } + + @Override + public final ByteBuf writerIndex(int writerIndex) { + buf.writerIndex(writerIndex); + return this; + } + + @Override + public ByteBuf setIndex(int readerIndex, int writerIndex) { + buf.setIndex(readerIndex, writerIndex); + return this; + } + + @Override + public final int readableBytes() { + return buf.readableBytes(); + } + + @Override + public final int writableBytes() { + return buf.writableBytes(); + } + + @Override + public final int maxWritableBytes() { + return buf.maxWritableBytes(); + } + + @Override + public int maxFastWritableBytes() { + return buf.maxFastWritableBytes(); + } + + @Override + public final boolean isReadable() { + return buf.isReadable(); + } + + @Override + public final boolean isWritable() { + return buf.isWritable(); + } + + @Override + public final ByteBuf clear() { + buf.clear(); + return this; + } + + @Override + public final ByteBuf markReaderIndex() { + buf.markReaderIndex(); + return this; + } + + @Override + public final ByteBuf resetReaderIndex() { + buf.resetReaderIndex(); + return this; + } + + @Override + public final ByteBuf markWriterIndex() { + buf.markWriterIndex(); + return this; + } + + @Override + public final ByteBuf resetWriterIndex() { + buf.resetWriterIndex(); + return this; + } + + @Override + public ByteBuf discardReadBytes() { + buf.discardReadBytes(); + return this; + } + + @Override + public ByteBuf discardSomeReadBytes() { + buf.discardSomeReadBytes(); + return this; + } + + @Override + public ByteBuf ensureWritable(int minWritableBytes) { + buf.ensureWritable(minWritableBytes); + return this; + } + + @Override + public int ensureWritable(int minWritableBytes, boolean force) { + return buf.ensureWritable(minWritableBytes, force); + } + + @Override + public boolean getBoolean(int index) { + return buf.getBoolean(index); + } + + @Override + public byte getByte(int index) { + return buf.getByte(index); + } + + @Override + public short getUnsignedByte(int index) { + return buf.getUnsignedByte(index); + } + + 
@Override + public short getShort(int index) { + return buf.getShort(index); + } + + @Override + public short getShortLE(int index) { + return buf.getShortLE(index); + } + + @Override + public int getUnsignedShort(int index) { + return buf.getUnsignedShort(index); + } + + @Override + public int getUnsignedShortLE(int index) { + return buf.getUnsignedShortLE(index); + } + + @Override + public int getMedium(int index) { + return buf.getMedium(index); + } + + @Override + public int getMediumLE(int index) { + return buf.getMediumLE(index); + } + + @Override + public int getUnsignedMedium(int index) { + return buf.getUnsignedMedium(index); + } + + @Override + public int getUnsignedMediumLE(int index) { + return buf.getUnsignedMediumLE(index); + } + + @Override + public int getInt(int index) { + return buf.getInt(index); + } + + @Override + public int getIntLE(int index) { + return buf.getIntLE(index); + } + + @Override + public long getUnsignedInt(int index) { + return buf.getUnsignedInt(index); + } + + @Override + public long getUnsignedIntLE(int index) { + return buf.getUnsignedIntLE(index); + } + + @Override + public long getLong(int index) { + return buf.getLong(index); + } + + @Override + public long getLongLE(int index) { + return buf.getLongLE(index); + } + + @Override + public char getChar(int index) { + return buf.getChar(index); + } + + @Override + public float getFloat(int index) { + return buf.getFloat(index); + } + + @Override + public double getDouble(int index) { + return buf.getDouble(index); + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int length) { + buf.getBytes(index, dst, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuffer dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException { + buf.getBytes(index, out, length); + return this; + } + + @Override + public int getBytes(int index, GatheringByteChannel out, int length) throws IOException { + return buf.getBytes(index, out, length); + } + + @Override + public int getBytes(int index, FileChannel out, long position, int length) throws IOException { + return buf.getBytes(index, out, position, length); + } + + @Override + public CharSequence getCharSequence(int index, int length, Charset charset) { + return buf.getCharSequence(index, length, charset); + } + + @Override + public ByteBuf setBoolean(int index, boolean value) { + buf.setBoolean(index, value); + return this; + } + + @Override + public ByteBuf setByte(int index, int value) { + buf.setByte(index, value); + return this; + } + + @Override + public ByteBuf setShort(int index, int value) { + buf.setShort(index, value); + return this; + } + + @Override + public ByteBuf setShortLE(int index, int value) { + buf.setShortLE(index, value); + return this; + } + + @Override + public ByteBuf setMedium(int index, int value) { + buf.setMedium(index, value); + return this; + } + + @Override + public 
ByteBuf setMediumLE(int index, int value) { + buf.setMediumLE(index, value); + return this; + } + + @Override + public ByteBuf setInt(int index, int value) { + buf.setInt(index, value); + return this; + } + + @Override + public ByteBuf setIntLE(int index, int value) { + buf.setIntLE(index, value); + return this; + } + + @Override + public ByteBuf setLong(int index, long value) { + buf.setLong(index, value); + return this; + } + + @Override + public ByteBuf setLongLE(int index, long value) { + buf.setLongLE(index, value); + return this; + } + + @Override + public ByteBuf setChar(int index, int value) { + buf.setChar(index, value); + return this; + } + + @Override + public ByteBuf setFloat(int index, float value) { + buf.setFloat(index, value); + return this; + } + + @Override + public ByteBuf setDouble(int index, double value) { + buf.setDouble(index, value); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int length) { + buf.setBytes(index, src, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuffer src) { + buf.setBytes(index, src); + return this; + } + + @Override + public int setBytes(int index, InputStream in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, FileChannel in, long position, int length) throws IOException { + return buf.setBytes(index, in, position, length); + } + + @Override + public ByteBuf setZero(int index, int length) { + buf.setZero(index, length); + return this; + } + + @Override + public int setCharSequence(int index, CharSequence sequence, Charset charset) { + return buf.setCharSequence(index, sequence, charset); + } + + @Override + public boolean readBoolean() { + return buf.readBoolean(); + } + + @Override + public byte readByte() { + return buf.readByte(); + } + + @Override + public short readUnsignedByte() { + return buf.readUnsignedByte(); + } + + @Override + public short readShort() { + return buf.readShort(); + } + + @Override + public short readShortLE() { + return buf.readShortLE(); + } + + @Override + public int readUnsignedShort() { + return buf.readUnsignedShort(); + } + + @Override + public int readUnsignedShortLE() { + return buf.readUnsignedShortLE(); + } + + @Override + public int readMedium() { + return buf.readMedium(); + } + + @Override + public int readMediumLE() { + return buf.readMediumLE(); + } + + @Override + public int readUnsignedMedium() { + return buf.readUnsignedMedium(); + } + + @Override + public int readUnsignedMediumLE() { + return buf.readUnsignedMediumLE(); + } + + @Override + public int readInt() { + return buf.readInt(); + } + + @Override + public int readIntLE() { + return buf.readIntLE(); + } + + @Override + public long readUnsignedInt() { + return buf.readUnsignedInt(); + } + + @Override + public long 
readUnsignedIntLE() { + return buf.readUnsignedIntLE(); + } + + @Override + public long readLong() { + return buf.readLong(); + } + + @Override + public long readLongLE() { + return buf.readLongLE(); + } + + @Override + public char readChar() { + return buf.readChar(); + } + + @Override + public float readFloat() { + return buf.readFloat(); + } + + @Override + public double readDouble() { + return buf.readDouble(); + } + + @Override + public ByteBuf readBytes(int length) { + return buf.readBytes(length); + } + + @Override + public ByteBuf readSlice(int length) { + return buf.readSlice(length); + } + + @Override + public ByteBuf readRetainedSlice(int length) { + return buf.readRetainedSlice(length); + } + + @Override + public ByteBuf readBytes(ByteBuf dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int length) { + buf.readBytes(dst, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuffer dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(OutputStream out, int length) throws IOException { + buf.readBytes(out, length); + return this; + } + + @Override + public int readBytes(GatheringByteChannel out, int length) throws IOException { + return buf.readBytes(out, length); + } + + @Override + public int readBytes(FileChannel out, long position, int length) throws IOException { + return buf.readBytes(out, position, length); + } + + @Override + public CharSequence readCharSequence(int length, Charset charset) { + return buf.readCharSequence(length, charset); + } + + @Override + public ByteBuf skipBytes(int length) { + buf.skipBytes(length); + return this; + } + + @Override + public ByteBuf writeBoolean(boolean value) { + buf.writeBoolean(value); + return this; + } + + @Override + public ByteBuf writeByte(int value) { + buf.writeByte(value); + return this; + } + + @Override + public ByteBuf writeShort(int value) { + buf.writeShort(value); + return this; + } + + @Override + public ByteBuf writeShortLE(int value) { + buf.writeShortLE(value); + return this; + } + + @Override + public ByteBuf writeMedium(int value) { + buf.writeMedium(value); + return this; + } + + @Override + public ByteBuf writeMediumLE(int value) { + buf.writeMediumLE(value); + return this; + } + + @Override + public ByteBuf writeInt(int value) { + buf.writeInt(value); + return this; + } + + @Override + public ByteBuf writeIntLE(int value) { + buf.writeIntLE(value); + return this; + } + + @Override + public ByteBuf writeLong(long value) { + buf.writeLong(value); + return this; + } + + @Override + public ByteBuf writeLongLE(long value) { + buf.writeLongLE(value); + return this; + } + + @Override + public ByteBuf writeChar(int value) { + buf.writeChar(value); + return this; + } + + @Override + public ByteBuf writeFloat(float value) { + buf.writeFloat(value); + return this; + } + + @Override + public ByteBuf writeDouble(double value) { + buf.writeDouble(value); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int length) { + 
buf.writeBytes(src, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuffer src) { + buf.writeBytes(src); + return this; + } + + @Override + public int writeBytes(InputStream in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(ScatteringByteChannel in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(FileChannel in, long position, int length) throws IOException { + return buf.writeBytes(in, position, length); + } + + @Override + public ByteBuf writeZero(int length) { + buf.writeZero(length); + return this; + } + + @Override + public int writeCharSequence(CharSequence sequence, Charset charset) { + return buf.writeCharSequence(sequence, charset); + } + + @Override + public int indexOf(int fromIndex, int toIndex, byte value) { + return buf.indexOf(fromIndex, toIndex, value); + } + + @Override + public int bytesBefore(byte value) { + return buf.bytesBefore(value); + } + + @Override + public int bytesBefore(int length, byte value) { + return buf.bytesBefore(length, value); + } + + @Override + public int bytesBefore(int index, int length, byte value) { + return buf.bytesBefore(index, length, value); + } + + @Override + public int forEachByte(ByteProcessor processor) { + return buf.forEachByte(processor); + } + + @Override + public int forEachByte(int index, int length, ByteProcessor processor) { + return buf.forEachByte(index, length, processor); + } + + @Override + public int forEachByteDesc(ByteProcessor processor) { + return buf.forEachByteDesc(processor); + } + + @Override + public int forEachByteDesc(int index, int length, ByteProcessor processor) { + return buf.forEachByteDesc(index, length, processor); + } + + @Override + public ByteBuf copy() { + return buf.copy(); + } + + @Override + public ByteBuf copy(int index, int length) { + return buf.copy(index, length); + } + + @Override + public ByteBuf slice() { + return buf.slice(); + } + + @Override + public ByteBuf retainedSlice() { + return buf.retainedSlice(); + } + + @Override + public ByteBuf slice(int index, int length) { + return buf.slice(index, length); + } + + @Override + public ByteBuf retainedSlice(int index, int length) { + return buf.retainedSlice(index, length); + } + + @Override + public ByteBuf duplicate() { + return buf.duplicate(); + } + + @Override + public ByteBuf retainedDuplicate() { + return buf.retainedDuplicate(); + } + + @Override + public int nioBufferCount() { + return buf.nioBufferCount(); + } + + @Override + public ByteBuffer nioBuffer() { + return buf.nioBuffer(); + } + + @Override + public ByteBuffer nioBuffer(int index, int length) { + return buf.nioBuffer(index, length); + } + + @Override + public ByteBuffer[] nioBuffers() { + return buf.nioBuffers(); + } + + @Override + public ByteBuffer[] nioBuffers(int index, int length) { + return buf.nioBuffers(index, length); + } + + @Override + public ByteBuffer internalNioBuffer(int index, int length) { + return buf.internalNioBuffer(index, length); + } + + @Override + public boolean hasArray() { + return buf.hasArray(); + } + + @Override + 
public byte[] array() { + return buf.array(); + } + + @Override + public int arrayOffset() { + return buf.arrayOffset(); + } + + @Override + public String toString(Charset charset) { + return buf.toString(charset); + } + + @Override + public String toString(int index, int length, Charset charset) { + return buf.toString(index, length, charset); + } + + @Override + public int hashCode() { + return buf.hashCode(); + } + + @Override + @SuppressWarnings("EqualsWhichDoesntCheckParameterClass") + public boolean equals(Object obj) { + return buf.equals(obj); + } + + @Override + public int compareTo(ByteBuf buffer) { + return buf.compareTo(buffer); + } + + @Override + public String toString() { + return StringUtil.simpleClassName(this) + '(' + buf.toString() + ')'; + } + + @Override + public ByteBuf retain(int increment) { + buf.retain(increment); + return this; + } + + @Override + public ByteBuf retain() { + buf.retain(); + return this; + } + + @Override + public ByteBuf touch() { + buf.touch(); + return this; + } + + @Override + public ByteBuf touch(Object hint) { + buf.touch(hint); + return this; + } + + @Override + public final boolean isReadable(int size) { + return buf.isReadable(size); + } + + @Override + public final boolean isWritable(int size) { + return buf.isWritable(size); + } + + @Override + public final int refCnt() { + return buf.refCnt(); + } + + @Override + public boolean release() { + return buf.release(); + } + + @Override + public boolean release(int decrement) { + return buf.release(decrement); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java new file mode 100644 index 0000000000000..a76eb9fa4875b --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.Unpooled; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; + +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBuf; +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBufAllocator; + +public class NettyAllocatorTests extends ESTestCase { + + static void assertBufferTrashed(BytesReference bytesRef) throws IOException { + var iter = bytesRef.iterator(); + BytesRef br; + while ((br = iter.next()) != null) { + for (var i = br.offset; i < br.offset + br.length; i++) { + assertEquals("off=" + br.offset + " len=" + br.length + " i=" + i, 0, br.bytes[i]); + } + } + } + + public void testTrashArrayByteBuf() { + var arr = randomByteArrayOfLength(between(1024, 2048)); + var buf = Unpooled.wrappedBuffer(arr); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + var emptyArr = new byte[arr.length]; + assertArrayEquals(emptyArr, arr); + } + + public void testNioBufsTrashingByteBuf() { + var arrCnt = between(1, 16); + var byteArrs = new byte[arrCnt][]; + var byteBufs = new ByteBuffer[arrCnt]; + for (var i = 0; i < arrCnt; i++) { + byteArrs[i] = randomByteArrayOfLength(between(1024, 2048)); + byteBufs[i] = ByteBuffer.wrap(byteArrs[i]); + } + var buf = Unpooled.wrappedBuffer(byteBufs); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + for (int i = 0; i < arrCnt; i++) { + for (int j = 0; j < byteArrs[i].length; j++) { + assertEquals(0, byteArrs[i][j]); + } + } + } + + public void testNioBufOffsetTrashingByteBuf() { + var arr = randomByteArrayOfLength(1024); + var off = 1; + var len = arr.length - 2; + arr[0] = 1; + arr[arr.length - 1] = 1; + var buf = Unpooled.wrappedBuffer(arr, off, len); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + assertEquals(1, arr[0]); + assertEquals(1, arr[arr.length - 1]); + for (int i = 1; i < arr.length - 1; i++) { + assertEquals("at index " + i, 0, arr[i]); + } + } + + public void testTrashingByteBufAllocator() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var size = between(1024 * 1024, 10 * 1024 * 1024); + + // use 3 different heap allocation methods + for (var buf : List.of(alloc.heapBuffer(), alloc.heapBuffer(1024), alloc.heapBuffer(1024, size))) { + buf.writeBytes(randomByteArrayOfLength(size)); + var bytesRef = Netty4Utils.toBytesReference(buf); + buf.release(); + assertBufferTrashed(bytesRef); + } + } + + public void testTrashingCompositeByteBuf() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var compBuf = alloc.compositeHeapBuffer(); + for (var i = 0; i < between(1, 10); i++) { + var buf = alloc.heapBuffer().writeBytes(randomByteArrayOfLength(between(1024, 8192))); + compBuf.addComponent(true, buf); + } + var bytesRef = Netty4Utils.toBytesReference(compBuf); + compBuf.release(); + assertBufferTrashed(bytesRef); + } + +} diff --git a/muted-tests.yml b/muted-tests.yml index 2216873a3a265..8be390e670c9b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -162,9 +162,6 @@ tests: - class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT method: testDeprecatedSettingsReturnWarnings issue: https://github.com/elastic/elasticsearch/issues/108628 -- class: 
org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT - method: testEveryActionIsEitherOperatorOnlyOrNonOperator - issue: https://github.com/elastic/elasticsearch/issues/102992 - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/116249 @@ -211,42 +208,42 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/116777 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeWithinAggregations - issue: https://github.com/elastic/elasticsearch/issues/116856 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeSingleGrouping - issue: https://github.com/elastic/elasticsearch/issues/116857 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeNestedGrouping - issue: https://github.com/elastic/elasticsearch/issues/116858 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - method: test {p0=inference/30_semantic_text_inference/Calculates embeddings using the default ELSER 2 endpoint} - issue: https://github.com/elastic/elasticsearch/issues/116542 -- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests - method: testTermQuery - issue: https://github.com/elastic/elasticsearch/issues/116879 -- class: org.elasticsearch.xpack.inference.InferenceRestIT - issue: https://github.com/elastic/elasticsearch/issues/116899 - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 - class: org.elasticsearch.upgrades.QueryBuilderBWCIT method: testQueryBuilderBWC {cluster=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116990 -- class: org.elasticsearch.discovery.ClusterDisruptionIT - method: testAckedIndexing - issue: https://github.com/elastic/elasticsearch/issues/117024 - class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT method: test {yaml=reference/esql/esql-across-clusters/line_197} issue: https://github.com/elastic/elasticsearch/issues/117099 - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT method: test {yaml=/10_apm/Test template reinstallation} issue: https://github.com/elastic/elasticsearch/issues/116445 - +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testMultipleInferencesTriggeringDownloadAndDeploy + issue: https://github.com/elastic/elasticsearch/issues/117208 +- class: org.elasticsearch.xpack.logsdb.qa.StandardVersusLogsStoredSourceChallengeRestIT + method: testEsqlSource + issue: https://github.com/elastic/elasticsearch/issues/117212 +- class: org.elasticsearch.ingest.geoip.EnterpriseGeoIpDownloaderIT + method: testEnterpriseDownloaderTask + issue: https://github.com/elastic/elasticsearch/issues/115163 +- class: org.elasticsearch.versioning.ConcurrentSeqNoVersioningIT + method: testSeqNoCASLinearizability + issue: https://github.com/elastic/elasticsearch/issues/117249 +- class: org.elasticsearch.discovery.ClusterDisruptionIT + method: testAckedIndexing + issue: https://github.com/elastic/elasticsearch/issues/117024 +- class: 
org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it} + issue: https://github.com/elastic/elasticsearch/issues/117295 +- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests + method: testRetryPointInTime + issue: https://github.com/elastic/elasticsearch/issues/117116 # Examples: # diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a166a89ad4026..f281db5279660 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -77,6 +77,7 @@ tasks.register("writeTestJavaPolicy") { "permission java.security.SecurityPermission \"getProperty.jdk.tls.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.jdk.certpath.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.keystore.type.compat\";", + "permission java.security.SecurityPermission \"getProperty.org.bouncycastle.ec.max_f2m_field_size\";", "};" ].join("\n") ) diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle index aad59be376262..5f0fee6636256 100644 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle @@ -8,7 +8,6 @@ */ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin @@ -55,8 +54,9 @@ tasks.named("yamlRestTest").configure { enabled = false } ['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> TaskProvider fixture = tasks.register("ec2Fixture${action}", AntFixture) { dependsOn project.sourceSets.yamlRestTest.runtimeClasspath - env 'CLASSPATH', "${-> project.sourceSets.yamlRestTest.runtimeClasspath.asPath}" - executable = "${buildParams.runtimeJavaHome.get()}/bin/java" + FileCollection cp = project.sourceSets.yamlRestTest.runtimeClasspath + env 'CLASSPATH', "${-> cp.asPath}" + executable = "${buildParams.runtimeJavaHome.get() }/bin/java" args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt" } @@ -68,9 +68,18 @@ tasks.named("yamlRestTest").configure { enabled = false } classpath = yamlRestTestSourceSet.getRuntimeClasspath() } + if(action == 'ContainerCredentials') { + def addressAndPortSource = fixture.get().addressAndPortSource + testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { + environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', + () -> addressAndPortSource.map{ addr -> "http://${addr}/ecs_credentials_endpoint" }.get(), IGNORE_VALUE + } + } + tasks.named("check").configure { dependsOn(yamlRestTestTask) } + def addressAndPortSource = fixture.get().addressAndPortSource testClusters.matching { it.name == yamlRestTestTask.name}.configureEach { numberOfNodes = ec2NumberOfNodes @@ -78,9 +87,9 @@ tasks.named("yamlRestTest").configure { enabled = false } setting 'discovery.seed_providers', 'ec2' setting 'network.host', '_ec2_' - setting 'discovery.ec2.endpoint', { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + setting 'discovery.ec2.endpoint', { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE - systemProperty 
"com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> fixture.get().addressAndPort}" }, IGNORE_VALUE + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE } } @@ -107,11 +116,6 @@ tasks.named("ec2FixtureContainerCredentials").configure { env 'ACTIVATE_CONTAINER_CREDENTIALS', true } -testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { - environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', - { "http://${-> tasks.findByName("ec2FixtureContainerCredentials").addressAndPort}/ecs_credentials_endpoint" }, IGNORE_VALUE -} - // Extra config for InstanceProfile tasks.named("ec2FixtureInstanceProfile").configure { env 'ACTIVATE_INSTANCE_PROFILE', true diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index f6549a2d83fe6..d8f906b23d523 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -11,6 +11,10 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils +import org.elasticsearch.gradle.testclusters.TestClustersPlugin apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -63,6 +67,8 @@ excludeList.add('indices.resolve_index/20_resolve_system_index/*') // Excluded because the error has changed excludeList.add('aggregations/percentiles_hdr_metric/Negative values test') +def clusterPath = getPath() + buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> if (bwcVersion != VersionProperties.getElasticsearchVersion()) { /* This project runs the core REST tests against a 4 node cluster where two of @@ -84,18 +90,42 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { useCluster baseCluster mustRunAfter("precommit") + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def baseInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterOneNodeUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + + def baseInfoAfterTwoNodesUpdate = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set(baseName) + it.parameters.service = serviceProvider + }.map { it.getAllHttpSocketURI() } + def nonInputProps = nonInputProperties + def sharedRepoFolder = new File(buildDir, "cluster/shared/repo/${baseName}") doFirst { - delete("${buildDir}/cluster/shared/repo/${baseName}") + delete(sharedRepoFolder) // Getting the endpoints causes a wait for the cluster - println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Test cluster endpoints are: ${-> baseInfo.get().join(",")}" println "Upgrading one node to create a mixed cluster" 
baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster - println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" + println "Upgrade complete, endpoints are: ${-> baseInfoAfterOneNodeUpdate.get().join(",")}" println "Upgrading another node to create a mixed cluster" baseCluster.get().nextNodeToNextVersion() - nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) - nonInputProperties.systemProperty('tests.clustername', baseName) + nonInputProps.systemProperty('tests.rest.cluster', baseInfoAfterTwoNodesUpdate.map(c -> c.join(","))) + nonInputProps.systemProperty('tests.clustername', baseName) if (excludeList.isEmpty() == false) { systemProperty 'tests.rest.blacklist', excludeList.join(',') } @@ -103,7 +133,7 @@ buildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> systemProperty 'tests.path.repo', "${buildDir}/cluster/shared/repo/${baseName}" systemProperty 'tests.bwc_nodes_version', bwcVersion.toString().replace('-SNAPSHOT', '') systemProperty 'tests.new_nodes_version', project.version.toString().replace('-SNAPSHOT', '') - onlyIf("BWC tests disabled") { project.bwc_tests_enabled } +// onlyIf("BWC tests disabled") { project.bwc_tests_enabled } } tasks.register(bwcTaskName(bwcVersion)) { diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index a60e58c34918b..9f9aa78a4910b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -12,18 +12,31 @@ import junit.framework.TestCase; import org.elasticsearch.packaging.util.Distribution; -import org.elasticsearch.packaging.util.FileUtils; +import org.elasticsearch.packaging.util.LintianResultParser; +import org.elasticsearch.packaging.util.LintianResultParser.Issue; +import org.elasticsearch.packaging.util.LintianResultParser.Result; import org.elasticsearch.packaging.util.Shell; import org.junit.BeforeClass; +import java.util.List; import java.util.Locale; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.junit.Assume.assumeTrue; public class DebMetadataTests extends PackagingTestCase { + private final LintianResultParser lintianParser = new LintianResultParser(); + private static final List<String> IGNORED_TAGS = List.of( + // Override syntax changes between lintian versions in a non-backwards compatible way, so we have to tolerate these. + // Tag mismatched-override is a non-erasable tag which cannot be ignored with overrides, so we handle it here. + "mismatched-override", + // systemd-service-file-outside-lib is incorrect and has been removed in newer versions of Lintian + "systemd-service-file-outside-lib" + ); + @BeforeClass public static void filterDistros() { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); @@ -35,15 +48,26 @@ public void test05CheckLintian() { if (helpText.contains("--fail-on-warnings")) { extraArgs = "--fail-on-warnings"; } else if (helpText.contains("--fail-on error")) { - extraArgs = "--fail-on warning"; - // Recent lintian versions are picky about malformed or mismatched overrides.
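// A minimal, self-contained sketch of the tag-filtering approach this test moves to, assuming lintian's
// conventional "E: <package>: <tag> <details>" output format; LintianLineDemo, LINE and IGNORED are names
// invented for this example and are not taken from the patch.
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

class LintianLineDemo {
    // Severity (E/W), package, tag and free-form message, captured as named groups.
    private static final Pattern LINE = Pattern.compile("(?<severity>[EW]): (?<pkg>\\S+): (?<tag>\\S+) (?<message>.+)");
    private static final Set<String> IGNORED = Set.of("mismatched-override", "systemd-service-file-outside-lib");

    public static void main(String[] args) {
        String line = "E: elasticsearch: some-serious-tag a problem lintian found in the package";
        Matcher m = LINE.matcher(line);
        if (m.matches() && IGNORED.contains(m.group("tag")) == false) {
            // Tags that are not on the ignore list are the ones that should fail the packaging test.
            System.out.println("failing issue: [" + m.group("severity") + "] " + m.group("tag") + " - " + m.group("message"));
        } else {
            System.out.println("line is either unparseable or covered by the ignore list");
        }
    }
}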
- // Unfortunately override syntax changes between lintian versions in a non-backwards compatible - way, so we have to tolerate these (or maintain separate override files per lintian version). - if (helpText.contains("--suppress-tags")) { - extraArgs += " --suppress-tags malformed-override,mismatched-override"; + extraArgs = "--fail-on error,warning"; + } + Shell.Result result = sh.runIgnoreExitCode( + String.format(Locale.ROOT, "lintian %s %s", extraArgs, getDistributionFile(distribution())) + ); + Result lintianResult = lintianParser.parse(result.stdout()); + // Unfortunately Lintian override syntax changes between Lintian versions in a non-backwards compatible + // way, so we have to manage some exclusions outside the overrides file. + if (lintianResult.isSuccess() == false) { + List<Issue> importantIssues = lintianResult.issues() + .stream() + .filter(issue -> IGNORED_TAGS.contains(issue.tag()) == false) + .toList(); + if (importantIssues.isEmpty() == false) { + fail( + "Issues for DEB package found by Lintian:\n" + + importantIssues.stream().map(Record::toString).collect(Collectors.joining("\n")) + ); } } - sh.run(String.format(Locale.ROOT, "lintian %s %s", extraArgs, FileUtils.getDistributionFile(distribution()))); } public void test06Dependencies() { diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java new file mode 100644 index 0000000000000..511080427ea77 --- /dev/null +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.packaging.util; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.util.Arrays; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class LintianResultParser { + + private static final Logger logger = LogManager.getLogger(LintianResultParser.class); + private static final Pattern RESULT_PATTERN = Pattern.compile("(?<severity>[EW]): (?<package>\\S+): (?<tag>\\S+) (?<message>.+)"); + + public Result parse(String output) { + String[] lines = output.split("\n"); + List<Issue> issues = Arrays.stream(lines).map(line -> { + Matcher matcher = RESULT_PATTERN.matcher(line); + if (matcher.matches() == false) { + logger.info("Lintian output not matching expected pattern: {}", line); + return null; + } + Severity severity = switch (matcher.group("severity")) { + case "E" -> Severity.ERROR; + case "W" -> Severity.WARNING; + default -> Severity.UNKNOWN; + }; + return new Issue(severity, matcher.group("tag"), matcher.group("message")); + }).filter(Objects::nonNull).toList(); + + return new Result(issues.stream().noneMatch(it -> it.severity == Severity.ERROR || it.severity == Severity.WARNING), issues); + } + + public record Result(boolean isSuccess, List<Issue> issues) {} + + public record Issue(Severity severity, String tag, String message) {} + + enum Severity { + ERROR, + WARNING, + UNKNOWN + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json index bce8dfd794dca..6f3d09c15c081 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_point_in_time.json @@ -55,6 +55,10 @@ "type": "string", "description": "Specific the time to live for the point in time", "required": true + }, + "allow_partial_search_results": { + "type": "boolean", + "description": "Specify whether to tolerate shards missing when creating the point-in-time, or otherwise throw an exception.
(default: false)" } }, "body":{ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index 4977d87d5a348..deae022795ad2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -65,7 +65,7 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .put(super.nodeSettings(nodeOrdinal, otherSettings)) .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") - .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "2KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") .build(); } @@ -161,6 +161,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { IndexRequest indexRequest = indexRequest(index); long total = indexRequest.ramBytesUsed(); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); while (total < 2048) { refCounted.incRef(); handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); @@ -175,6 +177,8 @@ public void testIncrementalBulkLowWatermarkBackOff() throws Exception { handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); PlainActionFuture future = new PlainActionFuture<>(); handler.lastItems(List.of(indexRequest), refCounted::decRef, future); @@ -192,6 +196,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + long lowWaterMarkSplits = indexingPressure.stats().getLowWaterMarkSplits(); + long highWaterMarkSplits = indexingPressure.stats().getHighWaterMarkSplits(); AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); AtomicBoolean nextPage = new AtomicBoolean(false); @@ -217,6 +223,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); assertTrue(nextPage.get()); nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); ArrayList> requestsThrottle = new ArrayList<>(); // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled @@ -235,6 +243,8 @@ public void testIncrementalBulkHighWatermarkBackOff() throws Exception { // Wait until we are ready for the next page assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> 
assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(highWaterMarkSplits + 1))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(lowWaterMarkSplits)); for (IncrementalBulkService.Handler h : handlers) { refCounted.incRef(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java index 9364e7437141e..e4d44212f2854 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/monitor/metrics/NodeIndexingMetricsIT.java @@ -9,35 +9,48 @@ package org.elasticsearch.monitor.metrics; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.bulk.IncrementalBulkService; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexingPressure; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.telemetry.Measurement; import org.elasticsearch.telemetry.TestTelemetryPlugin; import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CyclicBarrier; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Function; import static org.elasticsearch.index.IndexingPressure.MAX_COORDINATING_BYTES; import static org.elasticsearch.index.IndexingPressure.MAX_PRIMARY_BYTES; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThan; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0, numClientNodes = 0) public class NodeIndexingMetricsIT extends ESIntegTestCase { @@ -453,6 +466,211 @@ public void testPrimaryDocumentRejectionMetricsFluctuatingOverTime() throws Exce } } + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkLowWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + 
.put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + IndexRequest indexRequest = indexRequest(index); + long total = indexRequest.ramBytesUsed(); + while (total < 2048) { + refCounted.incRef(); + handler.addItems(List.of(indexRequest), refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + indexRequest = indexRequest(index); + total += indexRequest.ramBytesUsed(); + } + + assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), greaterThan(0L)); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + refCounted.incRef(); + handler.addItems(List.of(indexRequest(index)), refCounted::decRef, () -> nextPage.set(true)); + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + assertBusy(() -> assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + PlainActionFuture future = new PlainActionFuture<>(); + handler.lastItems(List.of(indexRequest), refCounted::decRef, future); + + BulkResponse bulkResponse = safeGet(future); + assertNoFailures(bulkResponse); + assertFalse(refCounted.hasReferences()); + } + + // Borrowed this test from IncrementalBulkIT and added test for metrics to it + public void testIncrementalBulkHighWatermarkSplitMetrics() throws Exception { + final String nodeName = internalCluster().startNode( + Settings.builder() + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK.getKey(), "512B") + .put(IndexingPressure.SPLIT_BULK_LOW_WATERMARK_SIZE.getKey(), "2048B") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK.getKey(), "4KB") + .put(IndexingPressure.SPLIT_BULK_HIGH_WATERMARK_SIZE.getKey(), "1024B") + .build() + ); + ensureStableCluster(1); + + String index = "test"; + createIndex(index); + + IncrementalBulkService incrementalBulkService = 
internalCluster().getInstance(IncrementalBulkService.class, nodeName); + IndexingPressure indexingPressure = internalCluster().getInstance(IndexingPressure.class, nodeName); + ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, nodeName); + final TestTelemetryPlugin testTelemetryPlugin = internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(TestTelemetryPlugin.class) + .findFirst() + .orElseThrow(); + testTelemetryPlugin.resetMeter(); + + AbstractRefCounted refCounted = AbstractRefCounted.of(() -> {}); + AtomicBoolean nextPage = new AtomicBoolean(false); + + ArrayList handlers = new ArrayList<>(); + for (int i = 0; i < 4; ++i) { + ArrayList> requests = new ArrayList<>(); + add512BRequests(requests, index); + IncrementalBulkService.Handler handler = incrementalBulkService.newBulkRequest(); + handlers.add(handler); + refCounted.incRef(); + handler.addItems(requests, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + } + + // Test that a request smaller than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is not throttled + ArrayList> requestsNoThrottle = new ArrayList<>(); + add512BRequests(requestsNoThrottle, index); + IncrementalBulkService.Handler handlerNoThrottle = incrementalBulkService.newBulkRequest(); + handlers.add(handlerNoThrottle); + refCounted.incRef(); + handlerNoThrottle.addItems(requestsNoThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertTrue(nextPage.get()); + nextPage.set(false); + assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getSingleRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + + ArrayList> requestsThrottle = new ArrayList<>(); + // Test that a request larger than SPLIT_BULK_HIGH_WATERMARK_SIZE (1KB) is throttled + add512BRequests(requestsThrottle, index); + add512BRequests(requestsThrottle, index); + + CountDownLatch finishLatch = new CountDownLatch(1); + blockWritePool(threadPool, finishLatch); + IncrementalBulkService.Handler handlerThrottled = incrementalBulkService.newBulkRequest(); + refCounted.incRef(); + handlerThrottled.addItems(requestsThrottle, refCounted::decRef, () -> nextPage.set(true)); + assertFalse(nextPage.get()); + finishLatch.countDown(); + + handlers.add(handlerThrottled); + + // Wait until we are ready for the next page + assertBusy(() -> assertTrue(nextPage.get())); + assertBusy(() -> assertThat(indexingPressure.stats().getHighWaterMarkSplits(), equalTo(1L))); + assertThat(indexingPressure.stats().getLowWaterMarkSplits(), equalTo(0L)); + + testTelemetryPlugin.collect(); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.low_watermark_splits.total" + ).getLong(), + equalTo(0L) + ); + assertThat( + getLatestRecordedMetric( + testTelemetryPlugin::getLongAsyncCounterMeasurement, + "es.indexing.coordinating.high_watermark_splits.total" + ).getLong(), + equalTo(1L) + ); + + for (IncrementalBulkService.Handler h : handlers) { + refCounted.incRef(); + PlainActionFuture future = new PlainActionFuture<>(); + h.lastItems(List.of(indexRequest(index)), refCounted::decRef, future); + BulkResponse bulkResponse = 
safeGet(future); + assertNoFailures(bulkResponse); + } + + assertBusy(() -> assertThat(indexingPressure.stats().getCurrentCombinedCoordinatingAndPrimaryBytes(), equalTo(0L))); + refCounted.decRef(); + assertFalse(refCounted.hasReferences()); + testTelemetryPlugin.collect(); + } + private static Measurement getSingleRecordedMetric(Function> metricGetter, String name) { final List measurements = metricGetter.apply(name); assertFalse("Indexing metric is not recorded", measurements.isEmpty()); @@ -470,4 +688,47 @@ private static boolean doublesEquals(double expected, double actual) { final double eps = .0000001; return Math.abs(expected - actual) < eps; } + + private static IndexRequest indexRequest(String index) { + IndexRequest indexRequest = new IndexRequest(); + indexRequest.index(index); + indexRequest.source(Map.of("field", randomAlphaOfLength(10))); + return indexRequest; + } + + private static void add512BRequests(ArrayList> requests, String index) { + long total = 0; + while (total < 512) { + IndexRequest indexRequest = indexRequest(index); + requests.add(indexRequest); + total += indexRequest.ramBytesUsed(); + } + assertThat(total, lessThan(1024L)); + } + + private static void blockWritePool(ThreadPool threadPool, CountDownLatch finishLatch) { + final var threadCount = threadPool.info(ThreadPool.Names.WRITE).getMax(); + final var startBarrier = new CyclicBarrier(threadCount + 1); + final var blockingTask = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + fail(e); + } + + @Override + protected void doRun() { + safeAwait(startBarrier); + safeAwait(finishLatch); + } + + @Override + public boolean isForceExecution() { + return true; + } + }; + for (int i = 0; i < threadCount; i++) { + threadPool.executor(ThreadPool.Names.WRITE).execute(blockingTask); + } + safeAwait(startBarrier); + } } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 887cfea36a199..688d2aaf905a6 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -204,6 +204,8 @@ static TransportVersion def(int id) { public static final TransportVersion FAST_REFRESH_RCO_2 = def(8_795_00_0); public static final TransportVersion ESQL_ENRICH_RUNTIME_WARNINGS = def(8_796_00_0); public static final TransportVersion INGEST_PIPELINE_CONFIGURATION_AS_MAP = def(8_797_00_0); + public static final TransportVersion INDEXING_PRESSURE_THROTTLING_STATS = def(8_798_00_0); + public static final TransportVersion REINDEX_DATA_STREAMS = def(8_799_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 7791ca200a785..7b65547a7d591 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -189,7 +189,9 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); + public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); + public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index e2475bca31d53..afe615add28df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndices; @@ -56,15 +55,13 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA /** * Once all feature migrations for 8.x -> 9.x have been tested, we can bump this to Version.V_8_0_0 */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; + public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_8_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; @Inject - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // Once we begin working on 9.x, we need to update our migration classes public TransportGetFeatureUpgradeStatusAction( TransportService transportService, ThreadPool threadPool, @@ -149,7 +146,6 @@ static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus getFeatureUpgradeSta .map(idxInfo -> ERROR) .map(idxStatus -> GetFeatureUpgradeStatusResponse.UpgradeStatus.combine(idxStatus, initialStatus)) .orElse(initialStatus); - return new GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus(featureName, minimumVersion, status, indexInfos); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java index c1f867c247345..5c4be62723e07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsNodes.java @@ -793,6 +793,8 @@ static class 
IndexPressureStats implements ToXContentFragment { long currentCoordinatingOps = 0; long currentPrimaryOps = 0; long currentReplicaOps = 0; + long lowWaterMarkSplits = 0; + long highWaterMarkSplits = 0; for (NodeStats nodeStat : nodeStats) { IndexingPressureStats nodeStatIndexingPressureStats = nodeStat.getIndexingPressureStats(); if (nodeStatIndexingPressureStats != null) { @@ -816,6 +818,8 @@ static class IndexPressureStats implements ToXContentFragment { currentReplicaOps += nodeStatIndexingPressureStats.getCurrentReplicaOps(); primaryDocumentRejections += nodeStatIndexingPressureStats.getPrimaryDocumentRejections(); totalCoordinatingRequests += nodeStatIndexingPressureStats.getTotalCoordinatingRequests(); + lowWaterMarkSplits += nodeStatIndexingPressureStats.getLowWaterMarkSplits(); + highWaterMarkSplits += nodeStatIndexingPressureStats.getHighWaterMarkSplits(); } } indexingPressureStats = new IndexingPressureStats( @@ -838,7 +842,9 @@ static class IndexPressureStats implements ToXContentFragment { currentPrimaryOps, currentReplicaOps, primaryDocumentRejections, - totalCoordinatingRequests + totalCoordinatingRequests, + lowWaterMarkSplits, + highWaterMarkSplits ); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java index d8db2c5e657b4..ce9b48666d6ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsRequest.java @@ -48,9 +48,10 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, return new CancellableTask(id, type, action, "", parentTaskId, headers); } - public ClusterStatsRequest asRemoteStats() { - this.remoteStats = true; - return this; + public static ClusterStatsRequest newRemoteClusterStatsRequest() { + final var request = new ClusterStatsRequest(); + request.remoteStats = true; + return request; } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 36b018b5002eb..97585ea9a1024 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterSnapshotStats; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; @@ -108,20 +109,19 @@ public class TransportClusterStatsAction extends TransportNodesAction< private final MetadataStatsCache mappingStatsCache; private final MetadataStatsCache analysisStatsCache; private final RemoteClusterService remoteClusterService; - private final TransportRemoteClusterStatsAction remoteClusterStatsAction; @Inject public TransportClusterStatsAction( ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + Client client, NodeService nodeService, IndicesService indicesService, RepositoriesService repositoriesService, UsageService usageService, 
ActionFilters actionFilters, - Settings settings, - TransportRemoteClusterStatsAction remoteClusterStatsAction + Settings settings ) { super( TYPE.name(), @@ -141,7 +141,9 @@ public TransportClusterStatsAction( this.analysisStatsCache = new MetadataStatsCache<>(threadPool.getThreadContext(), AnalysisStats::of); this.remoteClusterService = transportService.getRemoteClusterService(); this.settings = settings; - this.remoteClusterStatsAction = remoteClusterStatsAction; + + // register remote-cluster action with transport service only and not as a local-node Action that the Client can invoke + new TransportRemoteClusterStatsAction(client, transportService, actionFilters); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java index 4d57f10807af6..882aaa8b18e15 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportRemoteClusterStatsAction.java @@ -10,11 +10,11 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionType; import org.elasticsearch.action.RemoteClusterActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -27,26 +27,26 @@ public class TransportRemoteClusterStatsAction extends HandledTransportAction { public static final String NAME = "cluster:monitor/stats/remote"; - public static final ActionType TYPE = new ActionType<>(NAME); public static final RemoteClusterActionType REMOTE_TYPE = new RemoteClusterActionType<>( NAME, RemoteClusterStatsResponse::new ); - private final NodeClient client; + + private final Client client; + private final TransportService transportService; @Inject - public TransportRemoteClusterStatsAction(NodeClient client, TransportService transportService, ActionFilters actionFilters) { + public TransportRemoteClusterStatsAction(Client client, TransportService transportService, ActionFilters actionFilters) { super(NAME, transportService, actionFilters, RemoteClusterStatsRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.client = client; + this.transportService = transportService; } @Override protected void doExecute(Task task, RemoteClusterStatsRequest request, ActionListener listener) { - ClusterStatsRequest subRequest = new ClusterStatsRequest().asRemoteStats(); - subRequest.setParentTask(request.getParentTask()); - client.execute( + new ParentTaskAssigningClient(client, transportService.getLocalNode(), task).execute( TransportClusterStatsAction.TYPE, - subRequest, + ClusterStatsRequest.newRemoteClusterStatsRequest(), listener.map( clusterStatsResponse -> new RemoteClusterStatsResponse( clusterStatsResponse.getClusterUUID(), diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java new file mode 100644 index 
0000000000000..814c512c43bec --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/datastreams/ReindexDataStreamAction.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.Objects; + +public class ReindexDataStreamAction extends ActionType { + + public static final ReindexDataStreamAction INSTANCE = new ReindexDataStreamAction(); + public static final String NAME = "indices:admin/data_stream/reindex"; + + public ReindexDataStreamAction() { + super(NAME); + } + + public static class ReindexDataStreamResponse extends ActionResponse implements ToXContentObject { + private final String taskId; + + public ReindexDataStreamResponse(String taskId) { + super(); + this.taskId = taskId; + } + + public ReindexDataStreamResponse(StreamInput in) throws IOException { + super(in); + this.taskId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(taskId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("task", getTaskId()); + builder.endObject(); + return builder; + } + + public String getTaskId() { + return taskId; + } + + @Override + public int hashCode() { + return Objects.hashCode(taskId); + } + + @Override + public boolean equals(Object other) { + return other instanceof ReindexDataStreamResponse && taskId.equals(((ReindexDataStreamResponse) other).taskId); + } + + } + + public static class ReindexDataStreamRequest extends ActionRequest { + private final String sourceDataStream; + + public ReindexDataStreamRequest(String sourceDataStream) { + super(); + this.sourceDataStream = sourceDataStream; + } + + public ReindexDataStreamRequest(StreamInput in) throws IOException { + super(in); + this.sourceDataStream = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(sourceDataStream); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public boolean getShouldStoreResult() { + return true; // do not wait_for_completion + } + + public String getSourceDataStream() { + return sourceDataStream; + } + + @Override + public int hashCode() { + return Objects.hashCode(sourceDataStream); + } + + @Override + public boolean equals(Object other) { + return other instanceof ReindexDataStreamRequest + && sourceDataStream.equals(((ReindexDataStreamRequest) other).sourceDataStream); + } + } +} diff --git 
a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 633e7ef6793ab..93e1b18ec64c6 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -280,15 +281,15 @@ private void tryShardMultiGetFromTranslog( } else { assert r.segmentGeneration() > -1L; assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), + final ActionListener termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext( listener.delegateFailureAndWrap( (ll, aLong) -> getExecutor(request, shardId).execute( ActionRunnable.supply(ll, () -> handleLocalGets(request, r.multiGetShardResponse(), shardId)) ) - ) + ), + threadPool.getThreadContext() ); + indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener); } } }), TransportShardMultiGetFomTranslogAction.Response::new, getExecutor(request, shardId)) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java index 699198a8e22c2..56d185645e149 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Bootstrap.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.env.Environment; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginsLoader; import java.io.PrintStream; @@ -42,6 +43,9 @@ class Bootstrap { // the loaded settings for the node, not valid until after phase 2 of initialization private final SetOnce nodeEnv = new SetOnce<>(); + // loads information about plugins required for entitlements in phase 2, used by plugins service in phase 3 + private final SetOnce pluginsLoader = new SetOnce<>(); + Bootstrap(PrintStream out, PrintStream err, ServerArgs args) { this.out = out; this.err = err; @@ -72,6 +76,14 @@ Environment environment() { return nodeEnv.get(); } + void setPluginsLoader(PluginsLoader pluginsLoader) { + this.pluginsLoader.set(pluginsLoader); + } + + PluginsLoader pluginsLoader() { + return pluginsLoader.get(); + } + void exitWithNodeValidationException(NodeValidationException e) { Logger logger = LogManager.getLogger(Elasticsearch.class); logger.error("node validation exception\n{}", e.getMessage()); diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 2a83f749e7d33..95e5b00a2805f 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -30,6 +30,7 @@ import org.elasticsearch.core.AbstractRefCounted; import 
org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.Tuple; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; @@ -41,6 +42,9 @@ import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.node.Node; import org.elasticsearch.node.NodeValidationException; +import org.elasticsearch.plugins.PluginBundle; +import org.elasticsearch.plugins.PluginsLoader; +import org.elasticsearch.plugins.PluginsUtils; import java.io.IOException; import java.io.InputStream; @@ -50,8 +54,10 @@ import java.nio.file.Path; import java.security.Permission; import java.security.Security; +import java.util.ArrayList; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -199,9 +205,24 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { VectorUtil.class ); + // load the plugin Java modules and layers now for use in entitlements + var pluginsLoader = new PluginsLoader(nodeEnv.modulesFile(), nodeEnv.pluginsFile()); + bootstrap.setPluginsLoader(pluginsLoader); + if (Boolean.parseBoolean(System.getProperty("es.entitlements.enabled"))) { logger.info("Bootstrapping Entitlements"); - EntitlementBootstrap.bootstrap(); + + List> pluginData = new ArrayList<>(); + Set moduleBundles = PluginsUtils.getModuleBundles(nodeEnv.modulesFile()); + for (PluginBundle moduleBundle : moduleBundles) { + pluginData.add(Tuple.tuple(moduleBundle.getDir(), moduleBundle.pluginDescriptor().isModular())); + } + Set pluginBundles = PluginsUtils.getPluginBundles(nodeEnv.pluginsFile()); + for (PluginBundle pluginBundle : pluginBundles) { + pluginData.add(Tuple.tuple(pluginBundle.getDir(), pluginBundle.pluginDescriptor().isModular())); + } + // TODO: add a functor to map module to plugin name + EntitlementBootstrap.bootstrap(pluginData, callerClass -> null); } else { // install SM after natives, shutdown hooks, etc. logger.info("Bootstrapping java SecurityManager"); @@ -244,7 +265,7 @@ private static void ensureInitialized(Class... 
classes) { private static void initPhase3(Bootstrap bootstrap) throws IOException, NodeValidationException { checkLucene(); - Node node = new Node(bootstrap.environment()) { + Node node = new Node(bootstrap.environment(), bootstrap.pluginsLoader()) { @Override protected void validateNodeBeforeAcceptingRequests( final BootstrapContext context, diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 8a4464f194fc5..35b6eb1852237 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -246,6 +246,7 @@ public Coordinator( this.joinValidationService = new JoinValidationService( settings, transportService, + namedWriteableRegistry, this::getStateForJoinValidationService, () -> getLastAcceptedState().metadata(), this.onJoinValidators diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java index 7de7fd4d92d1b..9d5d74fa24648 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinValidationService.java @@ -21,6 +21,8 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -106,6 +108,7 @@ public class JoinValidationService { public JoinValidationService( Settings settings, TransportService transportService, + NamedWriteableRegistry namedWriteableRegistry, Supplier clusterStateSupplier, Supplier metadataSupplier, Collection> joinValidators @@ -120,9 +123,9 @@ public JoinValidationService( transportService.registerRequestHandler( JoinValidationService.JOIN_VALIDATE_ACTION_NAME, this.responseExecutor, - ValidateJoinRequest::new, + BytesTransportRequest::new, (request, channel, task) -> { - final var remoteState = request.getOrReadState(); + final var remoteState = readClusterState(namedWriteableRegistry, request); final var remoteMetadata = remoteState.metadata(); final var localMetadata = metadataSupplier.get(); if (localMetadata.clusterUUIDCommitted() && localMetadata.clusterUUID().equals(remoteMetadata.clusterUUID()) == false) { @@ -145,6 +148,20 @@ public JoinValidationService( ); } + private static ClusterState readClusterState(NamedWriteableRegistry namedWriteableRegistry, BytesTransportRequest request) + throws IOException { + try ( + var bytesStreamInput = request.bytes().streamInput(); + var in = new NamedWriteableAwareStreamInput( + CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), + namedWriteableRegistry + ) + ) { + in.setTransportVersion(request.version()); + return ClusterState.readFrom(in, null); + } + } + public void validateJoin(DiscoveryNode discoveryNode, ActionListener listener) { // This node isn't in the cluster yet so ClusterState#getMinTransportVersion() doesn't apply, we must obtain a specific connection // so we can check its transport version to decide how 
to proceed. diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java deleted file mode 100644 index c81e4877196b3..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/ValidateJoinRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.cluster.coordination; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressorFactory; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; -import org.elasticsearch.transport.TransportRequest; - -import java.io.IOException; - -public class ValidateJoinRequest extends TransportRequest { - private final CheckedSupplier stateSupplier; - private final RefCounted refCounted; - - public ValidateJoinRequest(StreamInput in) throws IOException { - super(in); - // recent versions send a BytesTransportRequest containing a compressed representation of the state - final var bytes = in.readReleasableBytesReference(); - final var version = in.getTransportVersion(); - final var namedWriteableRegistry = in.namedWriteableRegistry(); - this.stateSupplier = () -> readCompressed(version, bytes, namedWriteableRegistry); - this.refCounted = bytes; - } - - private static ClusterState readCompressed( - TransportVersion version, - BytesReference bytes, - NamedWriteableRegistry namedWriteableRegistry - ) throws IOException { - try ( - var bytesStreamInput = bytes.streamInput(); - var in = new NamedWriteableAwareStreamInput( - CompressorFactory.COMPRESSOR.threadLocalStreamInput(bytesStreamInput), - namedWriteableRegistry - ) - ) { - in.setTransportVersion(version); - return ClusterState.readFrom(in, null); - } - } - - public ValidateJoinRequest(ClusterState state) { - this.stateSupplier = () -> state; - this.refCounted = null; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - stateSupplier.get().writeTo(out); - } - - public ClusterState getOrReadState() throws IOException { - return stateSupplier.get(); - } - - @Override - public void incRef() { - if (refCounted != null) { - refCounted.incRef(); - } - } - - @Override - public boolean tryIncRef() { - return refCounted == null || refCounted.tryIncRef(); - } - - @Override - public boolean decRef() { - return refCounted != null && refCounted.decRef(); - } - - @Override - public boolean hasReferences() { - return refCounted == null || refCounted.hasReferences(); - } -} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java 
b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index bf80c38d64a4e..279243eeff7cf 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -48,17 +48,24 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.LongSupplier; import java.util.function.Predicate; -import java.util.stream.Collectors; -import java.util.stream.Stream; +/** + * This class's main focus is to resolve multi-syntax target expressions to resources or concrete indices. This resolution is influenced + * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: + * - Resolve expressions to concrete indices + * - Resolve expressions to data stream names + * - Resolve expressions to resources (meaning indices, data streams and aliases) + * Note: This class is performance sensitive, so we pay extra attention to the data structure usage and we avoid streams and iterators + * when possible in favor of the classic for-i loops. + */ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); @@ -190,7 +197,7 @@ public List<String> dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection<String> expressions = resolveExpressions(context, indexExpressions); + final Collection<String> expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) @@ -220,7 +227,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection<String> expressions = resolveExpressions(context, request.index()); + final Collection<String> expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); @@ -236,7 +243,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit ); } } - checkSystemIndexAccess(context, Set.of(ia.getWriteIndex())); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, ia.getWriteIndex()); return ia; } else { throw new IllegalArgumentException( @@ -245,30 +252,110 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection<String> resolveExpressions(Context context, String... expressions) { - if (context.getOptions().expandWildcardExpressions() == false) { + /** + * Resolve the expression to the set of indices, aliases, and, optionally, data streams that the expression matches. + * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the + * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. + */ + protected static Collection<String> resolveExpressionsToResources(Context context, String... 
expressions) { + // If we do not expand wildcards, then empty or _all expression result in an empty list + boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + if (expandWildcards == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); - } else { - return ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ); } } else { if (expressions == null || expressions.length == 0 || expressions.length == 1 && (Metadata.ALL.equals(expressions[0]) || Regex.isMatchAllPattern(expressions[0]))) { return WildcardExpressionResolver.resolveAll(context); + } else if (isNoneExpression(expressions)) { + return List.of(); + } + } + + // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. + Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + ? new LinkedHashSet<>() + : new ArrayList<>(expressions.length); + boolean wildcardSeen = false; + for (int i = 0, n = expressions.length; i < n; i++) { + String originalExpression = expressions[i]; + + // Resolve exclusion, a `-` prefixed expression is an exclusion only if it succeeds a wildcard. + boolean isExclusion = wildcardSeen && originalExpression.startsWith("-"); + String baseExpression = isExclusion ? originalExpression.substring(1) : originalExpression; + + // Resolve date math + baseExpression = DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime); + + // Validate base expression + validateResourceExpression(context, baseExpression, expressions); + + // Check if it's wildcard + boolean isWildcard = expandWildcards && WildcardExpressionResolver.isWildcard(originalExpression); + wildcardSeen |= isWildcard; + + if (isWildcard) { + Set matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + + if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { + throw notFoundException(baseExpression); + } + + if (isExclusion) { + resources.removeAll(matchingResources); + } else { + resources.addAll(matchingResources); + } } else { - return WildcardExpressionResolver.resolve( - context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ) - ); + if (isExclusion) { + resources.remove(baseExpression); + } else if (ensureAliasOrIndexExists(context, baseExpression)) { + resources.add(baseExpression); + } + } + } + return resources; + } + + /** + * Validates the requested expression by performing the following checks: + * - Ensure it's not empty + * - Ensure it doesn't start with `_` + * - Ensure it's not a remote expression unless the allow unavailable targets is enabled. + */ + private static void validateResourceExpression(Context context, String current, String[] expressions) { + if (Strings.isEmpty(current)) { + throw notFoundException(current); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. 
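// A simplified, standalone sketch of the exclusion ordering implemented in resolveExpressionsToResources above:
// a leading "-" acts as an exclusion only once a wildcard has been seen earlier in the expression list. It assumes
// trailing-'*' wildcards over a fixed set of names; ExpressionOrderingDemo and its methods are invented for this
// example and are not part of the resolver.
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class ExpressionOrderingDemo {
    static Set<String> resolve(List<String> expressions, Set<String> known) {
        Set<String> result = new LinkedHashSet<>();
        boolean wildcardSeen = false;
        for (String expression : expressions) {
            // An expression is an exclusion only if it starts with "-" and follows a wildcard.
            boolean isExclusion = wildcardSeen && expression.startsWith("-");
            String base = isExclusion ? expression.substring(1) : expression;
            boolean isWildcard = base.endsWith("*");
            wildcardSeen |= isWildcard;
            Set<String> matches = new LinkedHashSet<>();
            for (String name : known) {
                boolean matched = isWildcard ? name.startsWith(base.substring(0, base.length() - 1)) : name.equals(base);
                if (matched) {
                    matches.add(name);
                }
            }
            if (isExclusion) {
                result.removeAll(matches);
            } else {
                result.addAll(matches);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        Set<String> names = Set.of("logs-2024", "logs-old", "metrics");
        // "-logs-old" follows the wildcard, so it is an exclusion: only "logs-2024" remains.
        System.out.println(resolve(List.of("logs-*", "-logs-old"), names));
        // Before any wildcard, "-logs-old" is read as a literal (non-existent) name, so both logs indices remain.
        System.out.println(resolve(List.of("-logs-old", "logs-*"), names));
    }
}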
+ if (current.charAt(0) == '_') { + throw new InvalidIndexNameException(current, "must not start with '_'."); + } + ensureRemoteExpressionRequireIgnoreUnavailable(context.getOptions(), current, expressions); + } + + /** + * Throws an exception if the expression is a remote expression and we do not allow unavailable targets + */ + private static void ensureRemoteExpressionRequireIgnoreUnavailable(IndicesOptions options, String current, String[] expressions) { + if (options.ignoreUnavailable()) { + return; + } + if (RemoteClusterAware.isRemoteIndexName(current)) { + List crossClusterIndices = new ArrayList<>(); + for (int i = 0; i < expressions.length; i++) { + if (RemoteClusterAware.isRemoteIndexName(expressions[i])) { + crossClusterIndices.add(expressions[i]); + } } + throw new IllegalArgumentException( + "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices + ); } } @@ -341,7 +428,7 @@ String[] concreteIndexNames(Context context, String... indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); @@ -395,7 +482,9 @@ Index[] concreteIndices(Context context, String... indexExpressions) { && context.getOptions().includeFailureIndices()) { // Collect the data streams involved Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); aliasDataStreams.add(indicesLookup.get(index.getName()).getParentDataStream()); } for (DataStream dataStream : aliasDataStreams) { @@ -416,13 +505,16 @@ Index[] concreteIndices(Context context, String... 
indexExpressions) { if (context.getOptions().allowNoIndices() == false && concreteIndicesResult.isEmpty()) { throw notFoundException(indexExpressions); } - checkSystemIndexAccess(context, concreteIndicesResult); - return concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + Index[] resultArray = concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, resultArray); + return resultArray; } private static void resolveIndicesForDataStream(Context context, DataStream dataStream, Set concreteIndicesResult) { if (shouldIncludeRegularIndices(context.getOptions())) { - for (Index index : dataStream.getIndices()) { + List indices = dataStream.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -431,7 +523,9 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions())) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + List failureIndices = dataStream.getFailureIndices().getIndices(); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -482,64 +576,6 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract return indexAbstraction.getIndices().size() > 1; } - private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == Predicates.always()) { - return; - } - doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); - } - - private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { - final Metadata metadata = context.getState().metadata(); - final List resolvedSystemIndices = new ArrayList<>(); - final List resolvedNetNewSystemIndices = new ArrayList<>(); - final Set resolvedSystemDataStreams = new HashSet<>(); - final SortedMap indicesLookup = metadata.getIndicesLookup(); - boolean matchedIndex = false; - for (Index concreteIndex : concreteIndices) { - IndexMetadata idxMetadata = metadata.index(concreteIndex); - String name = concreteIndex.getName(); - if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { - matchedIndex = true; - IndexAbstraction indexAbstraction = indicesLookup.get(name); - if (indexAbstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(name)) { - resolvedNetNewSystemIndices.add(name); - } else { - resolvedSystemIndices.add(name); - } - } - } - if (matchedIndex) { - handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); - } - } - - private void handleMatchedSystemIndices( - List resolvedSystemIndices, - Set resolvedSystemDataStreams, - List resolvedNetNewSystemIndices - ) { - if (resolvedSystemIndices.isEmpty() == false) { - Collections.sort(resolvedSystemIndices); - deprecationLogger.warn( - DeprecationCategory.API, - "open_system_index_access", - 
"this request accesses system indices: {}, but in a future major version, direct access to system " - + "indices will be prevented by default", - resolvedSystemIndices - ); - } - if (resolvedSystemDataStreams.isEmpty() == false) { - throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); - } - if (resolvedNetNewSystemIndices.isEmpty() == false) { - throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); - } - } - private static IndexNotFoundException notFoundException(String... indexExpressions) { final IndexNotFoundException infe; if (indexExpressions == null @@ -568,16 +604,16 @@ private static IndexNotFoundException notFoundException(String... indexExpressio } private static boolean shouldTrackConcreteIndex(Context context, Index index) { - if (context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY - && context.netNewSystemIndexPredicate.test(index.getName())) { + if (SystemResourceAccess.isNetNewInBackwardCompatibleMode(context, index)) { // Exclude this one as it's a net-new system index, and we explicitly don't want those. return false; } + IndicesOptions options = context.getOptions(); if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) { DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { if (parentDataStream.isFailureStoreIndex(index.getName())) { - if (context.options.ignoreUnavailable()) { + if (options.ignoreUnavailable()) { return false; } else { throw new FailureIndexNotSupportedException(index); @@ -587,7 +623,6 @@ private static boolean shouldTrackConcreteIndex(Context context, Index index) { } final IndexMetadata imd = context.state.metadata().index(index); if (imd.getState() == IndexMetadata.State.CLOSE) { - IndicesOptions options = context.options; if (options.forbidClosedIndices() && options.ignoreUnavailable() == false) { throw new IndexClosedException(index); } else { @@ -721,21 +756,6 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) return state.metadata().hasIndexAbstraction(resolvedAliasOrIndex); } - /** - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression) { - return DateMathExpressionResolver.resolveExpression(dateExpression); - } - - /** - * @param time instant to consider when parsing the expression - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression, long time) { - return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); - } - /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. 
*/ @@ -765,7 +785,8 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); + // unmodifiable without creating a new collection as it might contain many items + Collection resolved = resolveExpressionsToResources(context, expressions); if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items return Collections.unmodifiableSet((Set) resolved); @@ -779,7 +800,7 @@ public Set resolveExpressions( * given index. *
<p>
Only aliases with filters are returned. If the indices list contains a non-filtering reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. + * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); @@ -799,7 +820,8 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * Iterates through the list of indices and selects the effective list of required aliases for the given index. *
<p>
Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - *
<p>
NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. + *
<p>
NOTE: the provided expressions must have been resolved already via + * {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] indexAliases( ClusterState state, @@ -878,7 +900,8 @@ public String[] indexAliases( .toArray(AliasMetadata[]::new); } List aliases = null; - for (AliasMetadata aliasMetadata : aliasCandidates) { + for (int i = 0; i < aliasCandidates.length; i++) { + AliasMetadata aliasMetadata = aliasCandidates[i]; if (requiredAlias.test(aliasMetadata)) { // If required - add it to the list of aliases if (aliases == null) { @@ -914,7 +937,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); // TODO: it appears that this can never be true? if (isAllIndices(resolvedExpressions)) { @@ -932,7 +955,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab for (String expression : resolvedExpressions) { IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { - for (Index index : indexAbstraction.getIndices()) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); String concreteIndex = index.getName(); if (norouting.contains(concreteIndex) == false) { AliasMetadata aliasMetadata = state.metadata().index(concreteIndex).getAliases().get(indexAbstraction.getName()); @@ -961,7 +985,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab continue; } if (dataStream.getIndices() != null) { - for (Index index : dataStream.getIndices()) { + for (int i = 0, n = dataStream.getIndices().size(); i < n; i++) { + Index index = dataStream.getIndices().get(i); String concreteIndex = index.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1006,8 +1031,8 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m Set r = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); Map> routings = new HashMap<>(); String[] concreteIndices = metadata.getConcreteAllIndices(); - for (String index : concreteIndices) { - routings.put(index, r); + for (int i = 0; i < concreteIndices.length; i++) { + routings.put(concreteIndices[i], r); } return routings; } @@ -1036,6 +1061,16 @@ static boolean isExplicitAllPattern(Collection aliasesOrIndices) { return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next()); } + /** + * Identifies if this expression list is *,-* which effectively means a request that requests no indices. + */ + static boolean isNoneExpression(String[] expressions) { + return expressions.length == 2 && "*".equals(expressions[0]) && "-*".equals(expressions[1]); + } + + /** + * @return the system access level that will be applied in this resolution. See {@link SystemIndexAccessLevel} for details. 
+ */ public SystemIndexAccessLevel getSystemIndexAccessLevel() { final SystemIndexAccessLevel accessLevel = SystemIndices.getSystemIndexAccessLevel(threadContext); assert accessLevel != SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY @@ -1043,6 +1078,14 @@ public SystemIndexAccessLevel getSystemIndexAccessLevel() { return accessLevel; } + /** + * Determines the right predicate based on the {@link IndexNameExpressionResolver#getSystemIndexAccessLevel()}. Specifically: + * - NONE implies no access to net-new system indices and data streams + * - BACKWARDS_COMPATIBLE_ONLY allows access also to net-new system resources + * - ALL allows access to everything + * - otherwise we fall back to {@link SystemIndices#getProductSystemIndexNamePredicate(ThreadContext)} + * @return the predicate that defines the access to system indices. + */ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; @@ -1067,6 +1110,43 @@ public Predicate getNetNewSystemIndexPredicate() { return systemIndices::isNetNewSystemIndex; } + /** + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * exception. + */ + @Nullable + private static boolean ensureAliasOrIndexExists(Context context, String name) { + boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); + IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); + if (indexAbstraction == null) { + if (ignoreUnavailable) { + return false; + } else { + throw notFoundException(name); + } + } + // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) + if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { + if (ignoreUnavailable) { + return false; + } else { + throw aliasesNotSupportedException(name); + } + } + if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { + if (ignoreUnavailable) { + return false; + } else { + IndexNotFoundException infe = notFoundException(name); + // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. + infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); + throw infe; + } + } + return true; + } + public static class Context { private final ClusterState state; @@ -1242,7 +1322,7 @@ public Predicate getSystemIndexAccessPredicate() { } /** - * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases + * Resolves name expressions with wildcards into the corresponding concrete indices/aliases/data streams */ static final class WildcardExpressionResolver { @@ -1251,8 +1331,8 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. - * Depending on the context, returns the names of the datastreams themselves or their backing indices. + * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. + * Depending on the context, returns the names of the data streams themselves or their backing indices. 
*/ public static Collection resolveAll(Context context) { List concreteIndices = resolveEmptyOrTrivialWildcard(context); @@ -1261,16 +1341,17 @@ public static Collection resolveAll(Context context) { return concreteIndices; } - Stream ias = context.getState() + Set resolved = new HashSet<>(concreteIndices.size()); + context.getState() .metadata() .getIndicesLookup() .values() .stream() .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) - .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())) + .forEach(ia -> resolved.addAll(expandToOpenClosed(context, ia))); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1283,73 +1364,6 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } - /** - * Returns all the existing resource (index, alias and datastream) names that the {@param expressions} list resolves to. - * The passed-in {@param expressions} can contain wildcards and exclusions, as well as plain resource names. - *
- * The return is a {@code Collection} (usually a {@code Set} but can also be a {@code List}, for performance reasons) of plain - * resource names only. All the returned resources are "accessible", in the given context, i.e. the resources exist - * and are not an alias or a datastream if the context does not permit it. - * Wildcard expressions, depending on the context: - *

- *   1. might throw an exception if they don't resolve to anything
- *   2. might not resolve to hidden or system resources (but plain names can refer to hidden or system resources)
- *   3. might resolve to aliases and datastreams, and it could be (depending on the context) that their backing indices are what's
- *      ultimately returned, instead of the alias or datastream name
- */ - public static Collection resolve(Context context, List expressions) { - // fast exit if there are no wildcards to evaluate - if (context.getOptions().expandWildcardExpressions() == false) { - return expressions; - } - int firstWildcardIndex = 0; - for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { - String expression = expressions.get(firstWildcardIndex); - if (isWildcard(expression)) { - break; - } - } - if (firstWildcardIndex == expressions.size()) { - return expressions; - } - Set result = new HashSet<>(); - for (int i = 0; i < firstWildcardIndex; i++) { - result.add(expressions.get(i)); - } - AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); - for (int i = firstWildcardIndex; i < expressions.size(); i++) { - String expression = expressions.get(i); - boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; - if (i == firstWildcardIndex || isWildcard(expression)) { - Stream matchingResources = matchResourcesToWildcard( - context, - isExclusion ? expression.substring(1) : expression - ); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - if (emptyWildcardExpansion != null) { - emptyWildcardExpansion.set(true); - matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); - } - if (isExclusion) { - matchingOpenClosedNames.forEach(result::remove); - } else { - matchingOpenClosedNames.forEach(result::add); - } - if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { - throw notFoundException(expression); - } - } else { - if (isExclusion) { - result.remove(expression.substring(1)); - } else { - result.add(expression); - } - } - } - return result; - } - private static IndexMetadata.State excludeState(IndicesOptions options) { final IndexMetadata.State excludeState; if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { @@ -1366,55 +1380,82 @@ private static IndexMetadata.State excludeState(IndicesOptions options) { } /** - * Given a single wildcard {@param expression}, return the {@code Stream} that contains all the resources (i.e. indices, aliases, - * and datastreams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's + * Given a single wildcard {@param expression}, return a {@code Set} that contains all the resources (i.e. indices, aliases, + * and data streams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's * name matches the {@param expression} wildcard). * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions - * on whether to consider alias, datastream, system, and hidden resources. - * It does NOT consider the open or closed status of index resources. + * on whether to consider alias, data stream, system, and hidden resources. 
*/ - private static Stream matchResourcesToWildcard(Context context, String wildcardExpression) { + static Set matchWildcardToResources(Context context, String wildcardExpression) { assert isWildcard(wildcardExpression); final SortedMap indicesLookup = context.getState().getMetadata().getIndicesLookup(); - Stream matchesStream; + Set matchedResources = new HashSet<>(); + // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" if (Regex.isSuffixMatchPattern(wildcardExpression)) { - // this is an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" - matchesStream = filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values().stream(); - } else { - matchesStream = indicesLookup.values().stream(); - if (Regex.isMatchAllPattern(wildcardExpression) == false) { - matchesStream = matchesStream.filter( - indexAbstraction -> Regex.simpleMatch(wildcardExpression, indexAbstraction.getName()) - ); + for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); + } + return matchedResources; + } + // In case of match all it fetches all index abstractions + if (Regex.isMatchAllPattern(wildcardExpression)) { + for (IndexAbstraction ia : indicesLookup.values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); } + return matchedResources; } - if (context.getOptions().ignoreAliases()) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.getType() != Type.ALIAS); + for (IndexAbstraction indexAbstraction : indicesLookup.values()) { + if (Regex.simpleMatch(wildcardExpression, indexAbstraction.getName())) { + maybeAddToResult(context, wildcardExpression, indexAbstraction, matchedResources); + } } - if (context.includeDataStreams() == false) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isDataStreamRelated() == false); + return matchedResources; + } + + private static void maybeAddToResult( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction, + Set matchedResources + ) { + if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) { + matchedResources.addAll(expandToOpenClosed(context, indexAbstraction)); } - // historic, i.e. not net-new, system indices are included irrespective of the system access predicate - // the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || (indexAbstraction.getType() != Type.DATA_STREAM - && indexAbstraction.getParentDataStream() == null - && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false) - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); + } + + /** + * Checks if this index abstraction should be included because it matched the wildcard expression. 
+ * @param context the options of this request that influence the decision if this index abstraction should be included in the result + * @param wildcardExpression the wildcard expression that matched this index abstraction + * @param indexAbstraction the index abstraction in question + * @return true, if the index abstraction should be included in the result + */ + private static boolean shouldExpandToIndexAbstraction( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction + ) { + if (context.getOptions().ignoreAliases() && indexAbstraction.getType() == Type.ALIAS) { + return false; + } + if (context.includeDataStreams() == false && indexAbstraction.isDataStreamRelated()) { + return false; + } + + if (indexAbstraction.isSystem() + && SystemResourceAccess.shouldExpandToSystemIndexAbstraction(context, indexAbstraction) == false) { + return false; + } + if (context.getOptions().expandWildcardsHidden() == false) { - if (wildcardExpression.startsWith(".")) { - // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also - // starts with "." - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isHidden() == false || indexAbstraction.getName().startsWith(".") - ); - } else { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isHidden() == false); + // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also + // starts with "." + if (indexAbstraction.isHidden() + && (wildcardExpression.startsWith(".") && indexAbstraction.getName().startsWith(".")) == false) { + return false; } } - return matchesStream; + return true; } private static Map filterIndicesLookupForSuffixWildcard( @@ -1430,35 +1471,39 @@ private static Map filterIndicesLookupForSuffixWildcar } /** - * Return the {@code Stream} of open and/or closed index names for the given {@param resources}. + * Return the {@code Set} of open and/or closed index names for the given {@param resources}. * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. 
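
// Illustrative sketch, not part of the patch itself: the hidden-resource rule quoted in the comment above,
// isolated into a tiny standalone predicate. When hidden wildcard expansion is disabled, a hidden resource
// only survives wildcard matching if both the wildcard and the resource name start with "." (the alias,
// data stream and system checks of shouldExpandToIndexAbstraction are left out here). The names below are
// hypothetical; this is not the Elasticsearch API.
class HiddenWildcardRuleSketch {

    static boolean visibleToWildcard(String wildcard, String name, boolean isHidden, boolean expandWildcardsHidden) {
        if (expandWildcardsHidden) {
            return true; // hidden resources are expanded like any other
        }
        // hidden resources survive only when both the expression and the name are dot-prefixed
        return isHidden == false || (wildcard.startsWith(".") && name.startsWith("."));
    }

    public static void main(String[] args) {
        System.out.println(visibleToWildcard("log*", ".hidden-logs", true, false));  // false: hidden, plain wildcard
        System.out.println(visibleToWildcard(".hid*", ".hidden-logs", true, false)); // true: dot wildcard, dot name
        System.out.println(visibleToWildcard("log*", "logs-2025", false, false));    // true: not hidden at all
    }
}
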
*/ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Set expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); - return resources.flatMap(indexAbstraction -> { - if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); - } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); - } else { - Stream indicesStateStream = Stream.of(); - if (shouldIncludeRegularIndices(context.getOptions())) { - indicesStateStream = indexAbstraction.getIndices().stream().map(context.state.metadata()::index); - } - if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { - DataStream dataStream = (DataStream) indexAbstraction; - indicesStateStream = Stream.concat( - indicesStateStream, - dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) - ); + Set resources = new HashSet<>(); + if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { + resources.add(indexAbstraction.getName()); + } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { + resources.add(indexAbstraction.getName()); + } else { + if (shouldIncludeRegularIndices(context.getOptions())) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - if (excludeState != null) { - indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); + } + if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { + DataStream dataStream = (DataStream) indexAbstraction; + for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { + Index index = dataStream.getFailureIndices().getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } - }); + } + return resources; } private static List resolveEmptyOrTrivialWildcard(Context context) { @@ -1471,26 +1516,26 @@ private static List resolveEmptyOrTrivialWildcard(Context context) { } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { - return Arrays.stream(allIndices).filter(name -> { - if (name.startsWith(".")) { - IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); - assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; - if (abstraction.isSystem()) { - if (context.netNewSystemIndexPredicate.test(name)) { - if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { - return false; - } else { - return context.systemIndexAccessPredicate.test(name); - } - } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { - return context.systemIndexAccessPredicate.test(name); - } - } else { - return true; - } + List filteredIndices = new 
ArrayList<>(allIndices.length); + for (int i = 0; i < allIndices.length; i++) { + if (shouldIncludeIndexAbstraction(context, allIndices[i])) { + filteredIndices.add(allIndices[i]); } + } + return filteredIndices; + } + + private static boolean shouldIncludeIndexAbstraction(Context context, String name) { + if (name.startsWith(".") == false) { return true; - }).toList(); + } + + IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); + assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; + if (abstraction.isSystem() == false) { + return true; + } + return SystemResourceAccess.isSystemIndexAbstractionAccessible(context, abstraction); } private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions options, Metadata metadata) { @@ -1513,8 +1558,39 @@ private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions return Strings.EMPTY_ARRAY; } } + + static boolean isWildcard(String expression) { + return Regex.isSimpleMatchPattern(expression); + } + + static boolean hasWildcards(String[] expressions) { + for (int i = 0; i < expressions.length; i++) { + if (isWildcard(expressions[i])) { + return true; + } + } + return false; + } + } + + /** + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression) { + return DateMathExpressionResolver.resolveExpression(dateExpression); + } + + /** + * @param time instant to consider when parsing the expression + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression, long time) { + return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); } + /** + * Resolves a date math expression based on the requested time. + */ public static final class DateMathExpressionResolver { private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); @@ -1530,35 +1606,18 @@ private DateMathExpressionResolver() { } /** - * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying. - * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects. + * Resolves a date math expression using the current time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. */ - public static List resolve(Context context, List expressions) { - boolean wildcardSeen = false; - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - String[] result = null; - for (int i = 0, n = expressions.size(); i < n; i++) { - String expression = expressions.get(i); - // accepts date-math exclusions that are of the form "-<...{}>",f i.e. the "-" is outside the "<>" date-math template - boolean isExclusion = wildcardSeen && expression.startsWith("-"); - wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression)); - String toResolve = isExclusion ? expression.substring(1) : expression; - String resolved = resolveExpression(toResolve, context::getStartTime); - if (toResolve != resolved) { - if (result == null) { - result = expressions.toArray(Strings.EMPTY_ARRAY); - } - result[i] = isExclusion ? "-" + resolved : resolved; - } - } - return result == null ? 
expressions : Arrays.asList(result); - } - - static String resolveExpression(String expression) { + public static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(String expression, LongSupplier getTime) { + /** + * Resolves a date math expression using the provided time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. + */ + public static String resolveExpression(String expression, LongSupplier getTime) { if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) { return expression; } @@ -1707,135 +1766,133 @@ private static String doResolveExpression(String expression, LongSupplier getTim } } - public static final class ExplicitResourceNameFilter { + /** + * In this class we collect the system access relevant code. The helper methods provide the following functionalities: + * - determining the access to a system index abstraction + * - verifying the access to system abstractions and adding the necessary warnings + * - determining the access to a system index based on its name + * WARNING: we have observed differences in how the access is determined. For now this behaviour is documented and preserved. + */ + public static final class SystemResourceAccess { - private ExplicitResourceNameFilter() { + private SystemResourceAccess() { // Utility class } /** - * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. - * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. + * Checks if this system index abstraction should be included when resolving via {@link + * IndexNameExpressionResolver.WildcardExpressionResolver#resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context, String[])}. + * NOTE: it behaves differently than {@link SystemResourceAccess#shouldExpandToSystemIndexAbstraction(Context, IndexAbstraction)} + * because in the case that the access level is BACKWARDS_COMPATIBLE_ONLY it does not include the net-new indices, this is + * questionable. */ - public static List filterUnavailable(Context context, List expressions) { - ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - boolean wildcardSeen = false; - List result = null; - for (int i = 0; i < expressions.size(); i++) { - String expression = expressions.get(i); - if (Strings.isEmpty(expression)) { - throw notFoundException(expression); - } - // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API - // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, - // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown - // if the expression can't be found. 
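
// Illustrative sketch, not part of the patch itself: the recognition rule described in the javadoc above.
// An expression is treated as date math only when it is wrapped in the date-math delimiters ('<' and '>',
// shown URL-encoded as %3C and %3E in the comment); anything else is returned intact. The toy resolver
// below only substitutes a literal "{now/d}" token with today's date in the "uuuu.MM.dd" default pattern
// and is a hypothetical simplification, not the real DateMathExpressionResolver.
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

class DateMathRecognitionSketch {

    static String resolve(String expression) {
        if (expression.startsWith("<") == false || expression.endsWith(">") == false) {
            return expression; // not a date math expression: returned unchanged
        }
        String inner = expression.substring(1, expression.length() - 1);
        String today = LocalDate.now().format(DateTimeFormatter.ofPattern("uuuu.MM.dd"));
        return inner.replace("{now/d}", today);
    }

    public static void main(String[] args) {
        System.out.println(resolve("<logs-{now/d}>"));  // e.g. logs-2025.06.17
        System.out.println(resolve("logs-2025.06.17")); // unchanged: no delimiters
    }
}
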
- if (expression.charAt(0) == '_') { - throw new InvalidIndexNameException(expression, "must not start with '_'."); - } - final boolean isWildcard = expandWildcards && isWildcard(expression); - if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) { - if (result != null) { - result.add(expression); - } + public static boolean isSystemIndexAbstractionAccessible(Context context, IndexAbstraction abstraction) { + assert abstraction.isSystem() : "We should only check this for system resources"; + if (context.netNewSystemIndexPredicate.test(abstraction.getName())) { + if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { + return false; } else { - if (result == null) { - result = new ArrayList<>(expressions.size() - 1); - result.addAll(expressions.subList(0, i)); - } + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - wildcardSeen |= isWildcard; + } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - return result == null ? expressions : result; + return true; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of - * exception. + * Historic, i.e. not net-new, system indices are included irrespective of the system access predicate + * the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature. + * A historic system resource, can only be an index since system data streams were added later. */ - @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { - boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); - IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); - if (indexAbstraction == null) { - if (ignoreUnavailable) { - return false; - } else { - throw notFoundException(name); - } - } - // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) - if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { - if (ignoreUnavailable) { - return false; - } else { - throw aliasesNotSupportedException(name); - } - } - if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { - if (ignoreUnavailable) { - return false; - } else { - IndexNotFoundException infe = notFoundException(name); - // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. 
- infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); - throw infe; - } - } - return true; + private static boolean shouldExpandToSystemIndexAbstraction(Context context, IndexAbstraction indexAbstraction) { + assert indexAbstraction.isSystem() : "We should only check this for system resources"; + boolean isHistoric = indexAbstraction.getType() != Type.DATA_STREAM + && indexAbstraction.getParentDataStream() == null + && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false; + return isHistoric || context.systemIndexAccessPredicate.test(indexAbstraction.getName()); } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { - if (options.ignoreUnavailable()) { + /** + * Checks if any system indices that should not have been accessible according to the + * {@link Context#getSystemIndexAccessPredicate()} are accessed, and it performs the following actions: + * - if there are historic (aka not net-new) system indices, then it adds a deprecation warning + * - if it contains net-new system indices or system data streams, it throws an exception. + */ + private static void checkSystemIndexAccess(Context context, ThreadContext threadContext, Index... concreteIndices) { + final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); + if (systemIndexAccessPredicate == Predicates.always()) { return; } - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); - } - } + doCheckSystemIndexAccess(context, systemIndexAccessPredicate, threadContext, concreteIndices); } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { - List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - crossClusterIndices.add(index); + private static void doCheckSystemIndexAccess( + Context context, + Predicate systemIndexAccessPredicate, + ThreadContext threadContext, + Index... concreteIndices + ) { + final Metadata metadata = context.getState().metadata(); + final List resolvedSystemIndices = new ArrayList<>(); + final List resolvedNetNewSystemIndices = new ArrayList<>(); + final Set resolvedSystemDataStreams = new HashSet<>(); + final SortedMap indicesLookup = metadata.getIndicesLookup(); + boolean matchedIndex = false; + for (int i = 0; i < concreteIndices.length; i++) { + Index concreteIndex = concreteIndices[i]; + IndexMetadata idxMetadata = metadata.index(concreteIndex); + String name = concreteIndex.getName(); + if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { + matchedIndex = true; + IndexAbstraction indexAbstraction = indicesLookup.get(name); + if (indexAbstraction.getParentDataStream() != null) { + resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); + } else if (context.netNewSystemIndexPredicate.test(name)) { + resolvedNetNewSystemIndices.add(name); + } else { + resolvedSystemIndices.add(name); + } } } - throw new IllegalArgumentException( - "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices - ); - } - } - - /** - * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState} - * since it uses only the start time to resolve expressions. 
- */ - public static final class ResolverContext extends Context { - public ResolverContext() { - this(System.currentTimeMillis()); - } - - public ResolverContext(long startTime) { - super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never()); + if (matchedIndex) { + handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices, threadContext); + } } - @Override - public ClusterState getState() { - throw new UnsupportedOperationException("should never be called"); + private static void handleMatchedSystemIndices( + List resolvedSystemIndices, + Set resolvedSystemDataStreams, + List resolvedNetNewSystemIndices, + ThreadContext threadContext + ) { + if (resolvedSystemIndices.isEmpty() == false) { + Collections.sort(resolvedSystemIndices); + deprecationLogger.warn( + DeprecationCategory.API, + "open_system_index_access", + "this request accesses system indices: {}, but in a future major version, direct access to system " + + "indices will be prevented by default", + resolvedSystemIndices + ); + } + if (resolvedSystemDataStreams.isEmpty() == false) { + throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); + } + if (resolvedNetNewSystemIndices.isEmpty() == false) { + throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); + } } - @Override - public IndicesOptions getOptions() { - throw new UnsupportedOperationException("should never be called"); + /** + * Used in {@link IndexNameExpressionResolver#shouldTrackConcreteIndex(Context, Index)} to exclude net-new indices + * when we are in backwards compatible only access level. + * This also feels questionable as well. + */ + private static boolean isNetNewInBackwardCompatibleMode(Context context, Index index) { + return context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY + && context.netNewSystemIndexPredicate.test(index.getName()); } } - private static boolean isWildcard(String expression) { - return Regex.isSimpleMatchPattern(expression); - } } diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index ddcfc1ea7eed8..51e6512072e41 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -74,6 +74,29 @@ static ByteBuffer[] toByteBuffers(BytesReference reference) { } } + /** + * Allocates new buffer and copy bytes from given BytesReference. + * + * @deprecated copying bytes is a right place for performance regression and unnecessary allocations. + * This method exists to serve very few places that struggle to handle reference counted buffers. + */ + @Deprecated(forRemoval = true) + static BytesReference copyBytes(BytesReference bytesReference) { + byte[] arr = new byte[bytesReference.length()]; + int offset = 0; + final BytesRefIterator iterator = bytesReference.iterator(); + try { + BytesRef slice; + while ((slice = iterator.next()) != null) { + System.arraycopy(slice.bytes, slice.offset, arr, offset, slice.length); + offset += slice.length; + } + return new BytesArray(arr); + } catch (IOException e) { + throw new AssertionError(e); + } + } + /** * Returns BytesReference composed of the provided ByteBuffers. 
*/ diff --git a/server/src/main/java/org/elasticsearch/http/HttpBody.java b/server/src/main/java/org/elasticsearch/http/HttpBody.java index a10487502ed3c..6571125677fab 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpBody.java +++ b/server/src/main/java/org/elasticsearch/http/HttpBody.java @@ -9,7 +9,6 @@ package org.elasticsearch.http; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.Nullable; @@ -21,11 +20,11 @@ public sealed interface HttpBody extends Releasable permits HttpBody.Full, HttpBody.Stream { static Full fromBytesReference(BytesReference bytesRef) { - return new ByteRefHttpBody(bytesRef); + return new ByteRefHttpBody(ReleasableBytesReference.wrap(bytesRef)); } static Full empty() { - return new ByteRefHttpBody(BytesArray.EMPTY); + return new ByteRefHttpBody(ReleasableBytesReference.empty()); } default boolean isFull() { @@ -56,7 +55,7 @@ default Stream asStream() { * Full content represents a complete http body content that can be accessed immediately. */ non-sealed interface Full extends HttpBody { - BytesReference bytes(); + ReleasableBytesReference bytes(); @Override default void close() {} @@ -114,5 +113,5 @@ interface ChunkHandler extends Releasable { default void close() {} } - record ByteRefHttpBody(BytesReference bytes) implements Full {} + record ByteRefHttpBody(ReleasableBytesReference bytes) implements Full {} } diff --git a/server/src/main/java/org/elasticsearch/http/HttpRequest.java b/server/src/main/java/org/elasticsearch/http/HttpRequest.java index ca6e51f2cec08..b4b1bb84433c9 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpRequest.java +++ b/server/src/main/java/org/elasticsearch/http/HttpRequest.java @@ -52,10 +52,4 @@ enum HttpVersion { */ void release(); - /** - * If this instances uses any pooled resources, creates a copy of this instance that does not use any pooled resources and releases - * any resources associated with this instance. If the instance does not use any shared resources, returns itself. 
- * @return a safe unpooled http request - */ - HttpRequest releaseAndCopy(); } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 3d8360e6ee3fa..d6daf11c0539a 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -94,7 +94,7 @@ HttpTracer maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { private void logFullContent(RestRequest restRequest) { try (var stream = HttpBodyTracer.getBodyOutputStream(restRequest.getRequestId(), HttpBodyTracer.Type.REQUEST)) { - restRequest.content().writeTo(stream); + restRequest.releasableContent().writeTo(stream); } catch (Exception e2) { assert false : e2; // no real IO here } diff --git a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java index f80e8a89f5cf2..43ae38fea6018 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexingPressure.java +++ b/server/src/main/java/org/elasticsearch/index/IndexingPressure.java @@ -105,6 +105,9 @@ public class IndexingPressure { private final AtomicLong replicaRejections = new AtomicLong(0); private final AtomicLong primaryDocumentRejections = new AtomicLong(0); + private final AtomicLong lowWaterMarkSplits = new AtomicLong(0); + private final AtomicLong highWaterMarkSplits = new AtomicLong(0); + private final long lowWatermark; private final long lowWatermarkSize; private final long highWatermark; @@ -265,11 +268,20 @@ public Releasable markReplicaOperationStarted(int operations, long bytes, boolea public boolean shouldSplitBulk(long size) { long currentUsage = (currentCombinedCoordinatingAndPrimaryBytes.get() + currentReplicaBytes.get()); - return (currentUsage >= lowWatermark && size >= lowWatermarkSize) || (currentUsage >= highWatermark && size >= highWatermarkSize); + if (currentUsage >= highWatermark && size >= highWatermarkSize) { + highWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to high watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + if (currentUsage >= lowWatermark && size >= lowWatermarkSize) { + lowWaterMarkSplits.getAndIncrement(); + logger.trace(() -> Strings.format("Split bulk due to low watermark: current bytes [%d] and size [%d]", currentUsage, size)); + return (true); + } + return (false); } public IndexingPressureStats stats() { - // TODO: Update stats with new primary/replica/coordinating limits and add throttling stats return new IndexingPressureStats( totalCombinedCoordinatingAndPrimaryBytes.get(), totalCoordinatingBytes.get(), @@ -290,7 +302,9 @@ public IndexingPressureStats stats() { currentPrimaryOps.get(), currentReplicaOps.get(), primaryDocumentRejections.get(), - totalCoordinatingRequests.get() + totalCoordinatingRequests.get(), + lowWaterMarkSplits.get(), + highWaterMarkSplits.get() ); } } diff --git a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java index b5197274dd519..0a56db56b2c95 100644 --- a/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java +++ b/server/src/main/java/org/elasticsearch/index/stats/IndexingPressureStats.java @@ -36,6 +36,12 @@ public class IndexingPressureStats implements Writeable, ToXContentFragment { private final long primaryDocumentRejections; private final long 
memoryLimit; + /* Count number of splits due to SPLIT_BULK_LOW_WATERMARK and SPLIT_BULK_HIGH_WATERMARK + These 2 stats are not serialized via X content yet. + */ + private final long lowWaterMarkSplits; + private final long highWaterMarkSplits; + // These fields will be used for additional back-pressure and metrics in the future private final long totalCoordinatingOps; private final long totalCoordinatingRequests; @@ -85,6 +91,14 @@ public IndexingPressureStats(StreamInput in) throws IOException { } else { totalCoordinatingRequests = -1L; } + + if (in.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + lowWaterMarkSplits = in.readVLong(); + highWaterMarkSplits = in.readVLong(); + } else { + lowWaterMarkSplits = -1L; + highWaterMarkSplits = -1L; + } } public IndexingPressureStats( @@ -107,7 +121,9 @@ public IndexingPressureStats( long currentPrimaryOps, long currentReplicaOps, long primaryDocumentRejections, - long totalCoordinatingRequests + long totalCoordinatingRequests, + long lowWaterMarkSplits, + long highWaterMarkSplits ) { this.totalCombinedCoordinatingAndPrimaryBytes = totalCombinedCoordinatingAndPrimaryBytes; this.totalCoordinatingBytes = totalCoordinatingBytes; @@ -131,6 +147,9 @@ public IndexingPressureStats( this.primaryDocumentRejections = primaryDocumentRejections; this.totalCoordinatingRequests = totalCoordinatingRequests; + + this.lowWaterMarkSplits = lowWaterMarkSplits; + this.highWaterMarkSplits = highWaterMarkSplits; } @Override @@ -160,6 +179,11 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_15_0)) { out.writeVLong(totalCoordinatingRequests); } + + if (out.getTransportVersion().onOrAfter(TransportVersions.INDEXING_PRESSURE_THROTTLING_STATS)) { + out.writeVLong(lowWaterMarkSplits); + out.writeVLong(highWaterMarkSplits); + } } public long getTotalCombinedCoordinatingAndPrimaryBytes() { @@ -242,6 +266,14 @@ public long getTotalCoordinatingRequests() { return totalCoordinatingRequests; } + public long getHighWaterMarkSplits() { + return highWaterMarkSplits; + } + + public long getLowWaterMarkSplits() { + return lowWaterMarkSplits; + } + private static final String COMBINED = "combined_coordinating_and_primary"; private static final String COMBINED_IN_BYTES = "combined_coordinating_and_primary_in_bytes"; private static final String COORDINATING = "coordinating"; diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 856b30d1c19e8..42cda4da1a9e6 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -110,7 +110,7 @@ public class SystemIndices { public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed"; public static final String EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_external_system_index_access_origin"; - public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-8"; + public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-9"; private static final Automaton EMPTY = Automata.makeEmpty(); diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index acc26a42e4745..94395193622e0 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ 
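
// Illustrative sketch, not part of the patch itself: the bulk-split decision introduced in IndexingPressure
// above, reduced to a standalone class. A bulk request is split when the current indexing bytes and the
// request size both cross a watermark; the high watermark is checked first, and each branch increments its
// own counter, which is what the new low/high watermark split stats and the two NodeMetrics counters below
// expose. The threshold values here are hypothetical, not Elasticsearch defaults.
import java.util.concurrent.atomic.AtomicLong;

class WatermarkSplitSketch {

    final long lowWatermark = 800, lowWatermarkSize = 10;
    final long highWatermark = 900, highWatermarkSize = 5;
    final AtomicLong lowWaterMarkSplits = new AtomicLong();
    final AtomicLong highWaterMarkSplits = new AtomicLong();

    boolean shouldSplitBulk(long currentUsage, long size) {
        if (currentUsage >= highWatermark && size >= highWatermarkSize) {
            highWaterMarkSplits.getAndIncrement();
            return true;
        }
        if (currentUsage >= lowWatermark && size >= lowWatermarkSize) {
            lowWaterMarkSplits.getAndIncrement();
            return true;
        }
        return false;
    }

    public static void main(String[] args) {
        WatermarkSplitSketch pressure = new WatermarkSplitSketch();
        System.out.println(pressure.shouldSplitBulk(950, 6));  // true: counted as a high watermark split
        System.out.println(pressure.shouldSplitBulk(850, 20)); // true: counted as a low watermark split
        System.out.println(pressure.shouldSplitBulk(100, 20)); // false: no split
        System.out.println(pressure.highWaterMarkSplits.get() + " / " + pressure.lowWaterMarkSplits.get()); // 1 / 1
    }
}
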
b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -643,6 +643,34 @@ private void registerAsyncMetrics(MeterRegistry registry) { ) ); + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.low_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_LOW_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getLowWaterMarkSplits) + .orElse(0L) + ) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating.high_watermark_splits.total", + "Total number of times bulk requests are split due to SPLIT_BULK_HIGH_WATERMARK", + "operations", + () -> new LongWithAttributes( + Optional.ofNullable(stats.getOrRefresh()) + .map(NodeStats::getIndexingPressureStats) + .map(IndexingPressureStats::getHighWaterMarkSplits) + .orElse(0L) + ) + ) + ); + metrics.add( registry.registerLongAsyncCounter( "es.flush.total.time", diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index ec4a534fc883b..80c9aafaa84b4 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -69,6 +69,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.MetadataUpgrader; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.repositories.RepositoriesService; @@ -180,8 +181,8 @@ public class Node implements Closeable { * * @param environment the initial environment for this node, which will be added to by plugins */ - public Node(Environment environment) { - this(NodeConstruction.prepareConstruction(environment, new NodeServiceProvider(), true)); + public Node(Environment environment, PluginsLoader pluginsLoader) { + this(NodeConstruction.prepareConstruction(environment, pluginsLoader, new NodeServiceProvider(), true)); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index caf65c05cf27d..2488ac894a612 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -164,6 +164,7 @@ import org.elasticsearch.plugins.NetworkPlugin; import org.elasticsearch.plugins.PersistentTaskPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.plugins.RecoveryPlannerPlugin; import org.elasticsearch.plugins.ReloadablePlugin; @@ -186,6 +187,7 @@ import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.rest.action.search.SearchResponseMetrics; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -260,6 +262,7 @@ class NodeConstruction { */ static NodeConstruction prepareConstruction( Environment initialEnvironment, + PluginsLoader pluginsLoader, 
NodeServiceProvider serviceProvider, boolean forbidPrivateIndexSettings ) { @@ -267,7 +270,7 @@ static NodeConstruction prepareConstruction( try { NodeConstruction constructor = new NodeConstruction(closeables); - Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider); + Settings settings = constructor.createEnvironment(initialEnvironment, serviceProvider, pluginsLoader); constructor.loadLoggingDataProviders(); TelemetryProvider telemetryProvider = constructor.createTelemetryProvider(settings); ThreadPool threadPool = constructor.createThreadPool(settings, telemetryProvider.getMeterRegistry()); @@ -400,7 +403,7 @@ private static Optional getSinglePlugin(Stream plugins, Class plugi return Optional.of(plugin); } - private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider) { + private Settings createEnvironment(Environment initialEnvironment, NodeServiceProvider serviceProvider, PluginsLoader pluginsLoader) { // Pass the node settings to the DeprecationLogger class so that it can have the deprecation.skip_deprecated_settings setting: Settings envSettings = initialEnvironment.settings(); DeprecationLogger.initialize(envSettings); @@ -473,7 +476,7 @@ private Settings createEnvironment(Environment initialEnvironment, NodeServicePr (e, apmConfig) -> logger.error("failed to delete temporary APM config file [{}], reason: [{}]", apmConfig, e.getMessage()) ); - pluginsService = serviceProvider.newPluginService(initialEnvironment, envSettings); + pluginsService = serviceProvider.newPluginService(initialEnvironment, pluginsLoader); modules.bindToInstance(PluginsService.class, pluginsService); Settings settings = Node.mergePluginSettings(pluginsService.pluginMap(), envSettings); @@ -1030,10 +1033,12 @@ private void construct( actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), - environment + environment, + fileSettingsHealthIndicatorService ); RestoreService restoreService = new RestoreService( @@ -1127,7 +1132,8 @@ private void construct( featureService, threadPool, telemetryProvider, - repositoriesService + repositoriesService, + fileSettingsHealthIndicatorService ) ); @@ -1299,7 +1305,8 @@ private Module loadDiagnosticServices( FeatureService featureService, ThreadPool threadPool, TelemetryProvider telemetryProvider, - RepositoriesService repositoriesService + RepositoriesService repositoriesService, + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService ) { MasterHistoryService masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); @@ -1314,7 +1321,8 @@ private Module loadDiagnosticServices( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), new DiskHealthIndicatorService(clusterService, featureService), - new ShardsCapacityHealthIndicatorService(clusterService, featureService) + new ShardsCapacityHealthIndicatorService(clusterService, featureService), + fileSettingsHealthIndicatorService ); var pluginHealthIndicatorServices = 
pluginsService.filterPlugins(HealthPlugin.class) .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); diff --git a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java index f18655afb8f02..8f2dc4e532ae0 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java +++ b/server/src/main/java/org/elasticsearch/node/NodeServiceProvider.java @@ -27,6 +27,7 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.ReadinessService; import org.elasticsearch.script.ScriptContext; @@ -51,9 +52,9 @@ */ class NodeServiceProvider { - PluginsService newPluginService(Environment environment, Settings settings) { + PluginsService newPluginService(Environment initialEnvironment, PluginsLoader pluginsLoader) { // this creates a PluginsService with an empty list of classpath plugins - return new PluginsService(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + return new PluginsService(initialEnvironment.settings(), initialEnvironment.configFile(), pluginsLoader); } ScriptService newScriptService( diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java new file mode 100644 index 0000000000000..6b3eda6c0c9b4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsLoader.java @@ -0,0 +1,461 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.plugins; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.jdk.JarHell; +import org.elasticsearch.jdk.ModuleQualifiedExportsService; + +import java.io.IOException; +import java.lang.ModuleLayer.Controller; +import java.lang.module.Configuration; +import java.lang.module.ModuleFinder; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Stream; + +import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; +import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; + +/** + * This class is used to load modules and module layers for each plugin during + * node initialization prior to enablement of entitlements. This allows entitlements + * to have all the plugin information they need prior to starting. + */ +public class PluginsLoader { + + /** + * Contains information about the {@link ClassLoader} required to load a plugin + */ + public interface PluginLayer { + /** + * @return Information about the bundle of jars used in this plugin + */ + PluginBundle pluginBundle(); + + /** + * @return The {@link ClassLoader} used to instantiate the main class for the plugin + */ + ClassLoader pluginClassLoader(); + } + + /** + * Contains information about the {@link ClassLoader}s and {@link ModuleLayer} required for loading a plugin + * @param pluginBundle Information about the bundle of jars used in this plugin + * @param pluginClassLoader The {@link ClassLoader} used to instantiate the main class for the plugin + * @param spiClassLoader The exported {@link ClassLoader} visible to other Java modules + * @param spiModuleLayer The exported {@link ModuleLayer} visible to other Java modules + */ + private record LoadedPluginLayer( + PluginBundle pluginBundle, + ClassLoader pluginClassLoader, + ClassLoader spiClassLoader, + ModuleLayer spiModuleLayer + ) implements PluginLayer { + + public LoadedPluginLayer { + Objects.requireNonNull(pluginBundle); + Objects.requireNonNull(pluginClassLoader); + Objects.requireNonNull(spiClassLoader); + Objects.requireNonNull(spiModuleLayer); + } + } + + /** + * Tuple of module layer and loader. + * Modular Plugins have a plugin specific loader and layer. + * Non-Modular plugins have a plugin specific loader and the boot layer. 
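Before the loader internals that follow, a minimal sketch of how this class is intended to be driven: a PluginsLoader is built from the modules and plugins directories first, so every module layer exists before entitlements are enabled, and is then handed to Node, whose constructor now accepts it (see the Node.java hunk above). The call site below is illustrative only; the real bootstrap wiring is outside this excerpt, and `environment` is an assumed, already-prepared Environment.

// Illustrative sketch, not part of this patch: assumes an Environment named `environment`.
PluginsLoader pluginsLoader = new PluginsLoader(
    environment.modulesFile(),  // per the constructor javadoc, null skips loading modules from disk
    environment.pluginsFile()   // null skips loading plugins from disk
);
// The loader is then threaded through node construction:
Node node = new Node(environment, pluginsLoader);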
+ */ + public record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { + + public LayerAndLoader { + Objects.requireNonNull(layer); + Objects.requireNonNull(loader); + } + + public static LayerAndLoader ofLoader(ClassLoader loader) { + return new LayerAndLoader(ModuleLayer.boot(), loader); + } + } + + private static final Logger logger = LogManager.getLogger(PluginsLoader.class); + private static final Module serverModule = PluginsLoader.class.getModule(); + + private final List moduleDescriptors; + private final List pluginDescriptors; + private final Map loadedPluginLayers; + + /** + * Constructs a new PluginsLoader + * + * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem + * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + */ + @SuppressWarnings("this-escape") + public PluginsLoader(Path modulesDirectory, Path pluginsDirectory) { + + Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); + addServerExportsService(qualifiedExports); + + Set seenBundles = new LinkedHashSet<>(); + + // load (elasticsearch) module layers + if (modulesDirectory != null) { + try { + Set modules = PluginsUtils.getModuleBundles(modulesDirectory); + moduleDescriptors = modules.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(modules); + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize modules", ex); + } + } else { + moduleDescriptors = Collections.emptyList(); + } + + // load plugin layers + if (pluginsDirectory != null) { + try { + // TODO: remove this leniency, but tests bogusly rely on it + if (isAccessibleDirectory(pluginsDirectory, logger)) { + PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); + Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); + pluginDescriptors = plugins.stream().map(PluginBundle::pluginDescriptor).toList(); + seenBundles.addAll(plugins); + } else { + pluginDescriptors = Collections.emptyList(); + } + } catch (IOException ex) { + throw new IllegalStateException("Unable to initialize plugins", ex); + } + } else { + pluginDescriptors = Collections.emptyList(); + } + + this.loadedPluginLayers = Collections.unmodifiableMap(loadPluginLayers(seenBundles, qualifiedExports)); + } + + public List moduleDescriptors() { + return moduleDescriptors; + } + + public List pluginDescriptors() { + return pluginDescriptors; + } + + public Stream pluginLayers() { + return loadedPluginLayers.values().stream().map(Function.identity()); + } + + private Map loadPluginLayers( + Set bundles, + Map> qualifiedExports + ) { + Map loaded = new LinkedHashMap<>(); + Map> transitiveUrls = new HashMap<>(); + List sortedBundles = PluginsUtils.sortBundles(bundles); + if (sortedBundles.isEmpty() == false) { + Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); + for (PluginBundle bundle : sortedBundles) { + PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); + loadPluginLayer(bundle, loaded, qualifiedExports); + } + } + + return loaded; + } + + private void loadPluginLayer( + PluginBundle bundle, + Map loaded, + Map> qualifiedExports + ) { + String name = bundle.plugin.getName(); + logger.debug(() -> "Loading bundle: " + name); + + PluginsUtils.verifyCompatibility(bundle.plugin); + + // collect the list of extended plugins + List extendedPlugins = new ArrayList<>(); + for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { + 
LoadedPluginLayer extendedPlugin = loaded.get(extendedPluginName); + assert extendedPlugin != null; + assert extendedPlugin.spiClassLoader() != null : "All non-classpath plugins should be loaded with a classloader"; + extendedPlugins.add(extendedPlugin); + } + + final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( + getClass().getClassLoader(), + extendedPlugins.stream().map(LoadedPluginLayer::spiClassLoader).toList() + ); + LayerAndLoader spiLayerAndLoader = null; + if (bundle.hasSPI()) { + spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); + } + + final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); + final LayerAndLoader pluginLayerAndLoader = createPlugin( + bundle, + pluginParentLoader, + extendedPlugins, + spiLayerAndLoader, + qualifiedExports + ); + final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); + + if (spiLayerAndLoader == null) { + // use full implementation for plugins extending this one + spiLayerAndLoader = pluginLayerAndLoader; + } + + loaded.put(name, new LoadedPluginLayer(bundle, pluginClassLoader, spiLayerAndLoader.loader, spiLayerAndLoader.layer)); + } + + static LayerAndLoader createSPI( + PluginBundle bundle, + ClassLoader parentLoader, + List extendedPlugins, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); + return createSpiModuleLayer( + bundle.spiUrls, + parentLoader, + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer).toList(), + qualifiedExports + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); + } + } + + static LayerAndLoader createPlugin( + PluginBundle bundle, + ClassLoader pluginParentLoader, + List extendedPlugins, + LayerAndLoader spiLayerAndLoader, + Map> qualifiedExports + ) { + final PluginDescriptor plugin = bundle.plugin; + if (plugin.getModuleName().isPresent()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); + var parentLayers = Stream.concat( + Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), + extendedPlugins.stream().map(LoadedPluginLayer::spiModuleLayer) + ).toList(); + return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); + } else if (plugin.isStable()) { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); + return LayerAndLoader.ofLoader( + UberModuleClassLoader.getInstance( + pluginParentLoader, + ModuleLayer.boot(), + "synthetic." 
+ toModuleName(plugin.getName()), + bundle.allUrls, + Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules + ) + ); + } else { + logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); + return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); + } + } + + static LayerAndLoader createSpiModuleLayer( + Set urls, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + // assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + null, // no entry point + spiModuleName(urls), + urlsToPaths(urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createPluginModuleLayer( + PluginBundle bundle, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + assert bundle.plugin.getModuleName().isPresent(); + return createModuleLayer( + bundle.plugin.getClassname(), + bundle.plugin.getModuleName().get(), + urlsToPaths(bundle.urls), + parentLoader, + parentLayers, + qualifiedExports + ); + } + + static LayerAndLoader createModuleLayer( + String className, + String moduleName, + Path[] paths, + ClassLoader parentLoader, + List parentLayers, + Map> qualifiedExports + ) { + logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); + var finder = ModuleFinder.of(paths); + + var configuration = Configuration.resolveAndBind( + ModuleFinder.of(), + parentConfigurationOrBoot(parentLayers), + finder, + Set.of(moduleName) + ); + var controller = privilegedDefineModulesWithOneLoader(configuration, parentLayersOrBoot(parentLayers), parentLoader); + var pluginModule = controller.layer().findModule(moduleName).get(); + ensureEntryPointAccessible(controller, pluginModule, className); + // export/open upstream modules to this plugin module + exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); + // configure qualified exports/opens to other modules/plugins + addPluginExportsServices(qualifiedExports, controller); + logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); + return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); + } + + /** Determines the module name of the SPI module, given its URL. 
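For readers unfamiliar with the JDK machinery that createModuleLayer builds on, here is a self-contained sketch of the same resolve-and-define sequence run against the boot layer. It is not Elasticsearch code; the jar directory and module name are placeholders.

import java.lang.ModuleLayer.Controller;
import java.lang.module.Configuration;
import java.lang.module.ModuleFinder;
import java.nio.file.Path;
import java.util.List;
import java.util.Set;

public class ModuleLayerSketch {
    public static void main(String[] args) {
        // Locate modules in a directory of jars (placeholder path).
        ModuleFinder finder = ModuleFinder.of(Path.of("/tmp/plugin-jars"));
        // Resolve the root module against the boot configuration.
        Configuration cf = Configuration.resolveAndBind(
            ModuleFinder.of(),                            // no "before" finder
            List.of(ModuleLayer.boot().configuration()),  // parent configurations
            finder,                                       // "after" finder holding the plugin jars
            Set.of("my.plugin.module")                    // placeholder root module name
        );
        // Define all resolved modules into one new layer served by a single class loader.
        Controller controller = ModuleLayer.defineModulesWithOneLoader(
            cf,
            List.of(ModuleLayer.boot()),
            ClassLoader.getSystemClassLoader()
        );
        ClassLoader pluginLoader = controller.layer().findLoader("my.plugin.module");
        System.out.println("loaded layer with loader: " + pluginLoader);
    }
}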
*/ + static String spiModuleName(Set<URL> spiURLS) { + ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); + var mrefs = finder.findAll(); + assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; + return mrefs.stream().findFirst().get().descriptor().name(); + } + + // package-visible for testing + static String toModuleName(String name) { + String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots + .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start + .replaceAll("\\.$", "") // trim trailing dot + .toLowerCase(Locale.getDefault()); + assert ModuleSupport.isPackageName(result); + return result; + } + + static final String toPackageName(String className) { + assert className.endsWith(".") == false; + int index = className.lastIndexOf('.'); + if (index == -1) { + throw new IllegalStateException("invalid class name:" + className); + } + return className.substring(0, index); + } + + @SuppressForbidden(reason = "I need to convert URL's to Paths") + static final Path[] urlsToPaths(Set<URL> urls) { + return urls.stream().map(PluginsLoader::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); + } + + static final URI uncheckedToURI(URL url) { + try { + return url.toURI(); + } catch (URISyntaxException e) { + throw new AssertionError(new IOException(e)); + } + } + + private static List<Configuration> parentConfigurationOrBoot(List<ModuleLayer> parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot().configuration()); + } else { + return parentLayers.stream().map(ModuleLayer::configuration).toList(); + } + } + + /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ + private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { + if (className != null) { + controller.addOpens(pluginModule, toPackageName(className), serverModule); + } + } + + @SuppressWarnings("removal") + static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List<ModuleLayer> parentLayers, ClassLoader parentLoader) { + return AccessController.doPrivileged( + (PrivilegedAction<Controller>) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) + ); + } + + @SuppressWarnings("removal") + static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { + return AccessController.doPrivileged((PrivilegedAction<ClassLoader>) () -> layer.findLoader(name)); + } + + private static List<ModuleLayer> parentLayersOrBoot(List<ModuleLayer> parentLayers) { + if (parentLayers == null || parentLayers.isEmpty()) { + return List.of(ModuleLayer.boot()); + } else { + return parentLayers; + } + } + + protected void addServerExportsService(Map<String, List<ModuleQualifiedExportsService>> qualifiedExports) { + var exportsService = new ModuleQualifiedExportsService(serverModule) { + @Override + protected void addExports(String pkg, Module target) { + serverModule.addExports(pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { + serverModule.addOpens(pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, serverModule.getName()); + } + + private static void addPluginExportsServices(Map<String, List<ModuleQualifiedExportsService>> qualifiedExports, Controller controller) { + for (Module module : controller.layer().modules()) { + var exportsService = new ModuleQualifiedExportsService(module) { + @Override + protected void addExports(String pkg, Module target) { + controller.addExports(module, pkg, target); + } + + @Override + protected void addOpens(String pkg, Module target) { +
controller.addOpens(module, pkg, target); + } + }; + addExportsService(qualifiedExports, exportsService, module.getName()); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java index d5dd6d62d615e..cfdb7aaf0b509 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsService.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsService.java @@ -23,34 +23,22 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.Tuple; -import org.elasticsearch.jdk.JarHell; -import org.elasticsearch.jdk.ModuleQualifiedExportsService; import org.elasticsearch.node.ReportingService; +import org.elasticsearch.plugins.PluginsLoader.PluginLayer; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.plugins.spi.SPIClassIterator; import java.io.IOException; -import java.lang.ModuleLayer.Controller; -import java.lang.module.Configuration; -import java.lang.module.ModuleFinder; import java.lang.reflect.Constructor; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.net.URLClassLoader; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; -import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -63,10 +51,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.addExportsService; -import static org.elasticsearch.jdk.ModuleQualifiedExportsService.exposeQualifiedExportsAndOpens; - public class PluginsService implements ReportingService { public StablePluginsRegistry getStablePluginRegistry() { @@ -77,33 +61,18 @@ public StablePluginsRegistry getStablePluginRegistry() { * A loaded plugin is one for which Elasticsearch has successfully constructed an instance of the plugin's class * @param descriptor Metadata about the plugin, usually loaded from plugin properties * @param instance The constructed instance of the plugin's main class - * @param loader The classloader for the plugin - * @param layer The module layer for the plugin */ - record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader loader, ModuleLayer layer) { + record LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { LoadedPlugin { Objects.requireNonNull(descriptor); Objects.requireNonNull(instance); - Objects.requireNonNull(loader); - Objects.requireNonNull(layer); - } - - /** - * Creates a loaded classpath plugin. A classpath plugin is a plugin loaded - * by the system classloader and defined to the unnamed module of the boot layer. 
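The PluginLayer interface introduced above is the hand-off point between the new PluginsLoader and PluginsService, whose rewrite follows. A hedged sketch of how a consumer walks the prepared layers; only pluginLayers(), pluginBundle() and pluginClassLoader() come from this patch, the logging is illustrative.

// Illustrative only: mirrors the new PluginsService.loadPluginBundles(...) below.
pluginsLoader.pluginLayers().forEach(pluginLayer -> {
    PluginBundle bundle = pluginLayer.pluginBundle();
    ClassLoader loader = pluginLayer.pluginClassLoader();
    // The plugin's main class is later instantiated with this loader; here we just report it.
    System.out.println("plugin [" + bundle.pluginDescriptor().getName() + "] uses " + loader);
});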
- */ - LoadedPlugin(PluginDescriptor descriptor, Plugin instance) { - this(descriptor, instance, PluginsService.class.getClassLoader(), ModuleLayer.boot()); } } private static final Logger logger = LogManager.getLogger(PluginsService.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(PluginsService.class); - private final Settings settings; - private final Path configPath; - /** * We keep around a list of plugins and modules. The order of * this list is that which the plugins and modules were loaded in. @@ -117,69 +86,32 @@ record LoadedPlugin(PluginDescriptor descriptor, Plugin instance, ClassLoader lo /** * Constructs a new PluginService * - * @param settings The settings of the system - * @param modulesDirectory The directory modules exist in, or null if modules should not be loaded from the filesystem - * @param pluginsDirectory The directory plugins exist in, or null if plugins should not be loaded from the filesystem + * @param settings The settings for this node + * @param configPath The configuration path for this node + * @param pluginsLoader the information required to complete loading of plugins */ - @SuppressWarnings("this-escape") - public PluginsService(Settings settings, Path configPath, Path modulesDirectory, Path pluginsDirectory) { - this.settings = settings; - this.configPath = configPath; - - Map> qualifiedExports = new HashMap<>(ModuleQualifiedExportsService.getBootServices()); - addServerExportsService(qualifiedExports); - - Set seenBundles = new LinkedHashSet<>(); - - // load modules - List modulesList = new ArrayList<>(); - Set moduleNameList = new HashSet<>(); - if (modulesDirectory != null) { - try { - Set modules = PluginsUtils.getModuleBundles(modulesDirectory); - modules.stream().map(PluginBundle::pluginDescriptor).forEach(m -> { - modulesList.add(m); - moduleNameList.add(m.getName()); - }); - seenBundles.addAll(modules); - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize modules", ex); - } - } + public PluginsService(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = loadPluginBundles(settings, configPath, pluginsLoader); - // load plugins - List pluginsList = new ArrayList<>(); - if (pluginsDirectory != null) { - try { - // TODO: remove this leniency, but tests bogusly rely on it - if (isAccessibleDirectory(pluginsDirectory, logger)) { - PluginsUtils.checkForFailedPluginRemovals(pluginsDirectory); - Set plugins = PluginsUtils.getPluginBundles(pluginsDirectory); - plugins.stream().map(PluginBundle::pluginDescriptor).forEach(pluginsList::add); - seenBundles.addAll(plugins); - } - } catch (IOException ex) { - throw new IllegalStateException("Unable to initialize plugins", ex); - } - } - - LinkedHashMap loadedPlugins = loadBundles(seenBundles, qualifiedExports); + var modulesDescriptors = pluginsLoader.moduleDescriptors(); + var pluginDescriptors = pluginsLoader.pluginDescriptors(); var inspector = PluginIntrospector.getInstance(); - this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginsList, loadedPlugins), modulesList); + this.info = new PluginsAndModules(getRuntimeInfos(inspector, pluginDescriptors, loadedPlugins), modulesDescriptors); this.plugins = List.copyOf(loadedPlugins.values()); - checkDeprecations(inspector, pluginsList, loadedPlugins); + checkDeprecations(inspector, pluginDescriptors, loadedPlugins); checkMandatoryPlugins( - pluginsList.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), + 
pluginDescriptors.stream().map(PluginDescriptor::getName).collect(Collectors.toSet()), new HashSet<>(MANDATORY_SETTING.get(settings)) ); // we don't log jars in lib/ we really shouldn't log modules, // but for now: just be transparent so we can debug any potential issues + Set moduleNames = new HashSet<>(modulesDescriptors.stream().map(PluginDescriptor::getName).toList()); for (String name : loadedPlugins.keySet()) { - if (moduleNameList.contains(name)) { + if (moduleNames.contains(name)) { logger.info("loaded module [{}]", name); } else { logger.info("loaded plugin [{}]", name); @@ -282,23 +214,11 @@ protected List plugins() { return this.plugins; } - private LinkedHashMap loadBundles( - Set bundles, - Map> qualifiedExports - ) { - LinkedHashMap loaded = new LinkedHashMap<>(); - Map> transitiveUrls = new HashMap<>(); - List sortedBundles = PluginsUtils.sortBundles(bundles); - if (sortedBundles.isEmpty() == false) { - Set systemLoaderURLs = JarHell.parseModulesAndClassPath(); - for (PluginBundle bundle : sortedBundles) { - PluginsUtils.checkBundleJarHell(systemLoaderURLs, bundle, transitiveUrls); - loadBundle(bundle, loaded, qualifiedExports); - } - } - - loadExtensions(loaded.values()); - return loaded; + private Map loadPluginBundles(Settings settings, Path configPath, PluginsLoader pluginsLoader) { + Map loadedPlugins = new LinkedHashMap<>(); + pluginsLoader.pluginLayers().forEach(pl -> loadBundle(pl, loadedPlugins, settings, configPath)); + loadExtensions(loadedPlugins.values()); + return loadedPlugins; } // package-private for test visibility @@ -443,68 +363,43 @@ private static String extensionConstructorMessage(Class extensi return "constructor for extension [" + extensionClass.getName() + "] of type [" + extensionPointType.getName() + "]"; } - private void loadBundle( - PluginBundle bundle, - Map loaded, - Map> qualifiedExports - ) { - String name = bundle.plugin.getName(); - logger.debug(() -> "Loading bundle: " + name); - - PluginsUtils.verifyCompatibility(bundle.plugin); + private void loadBundle(PluginLayer pluginLayer, Map loadedPlugins, Settings settings, Path configPath) { + String name = pluginLayer.pluginBundle().plugin.getName(); + logger.debug(() -> "Loading plugin bundle: " + name); - // collect the list of extended plugins + // validate the list of extended plugins List extendedPlugins = new ArrayList<>(); - for (String extendedPluginName : bundle.plugin.getExtendedPlugins()) { - LoadedPlugin extendedPlugin = loaded.get(extendedPluginName); + for (String extendedPluginName : pluginLayer.pluginBundle().plugin.getExtendedPlugins()) { + LoadedPlugin extendedPlugin = loadedPlugins.get(extendedPluginName); assert extendedPlugin != null; if (ExtensiblePlugin.class.isInstance(extendedPlugin.instance()) == false) { throw new IllegalStateException("Plugin [" + name + "] cannot extend non-extensible plugin [" + extendedPluginName + "]"); } - assert extendedPlugin.loader() != null : "All non-classpath plugins should be loaded with a classloader"; extendedPlugins.add(extendedPlugin); logger.debug( - () -> "Loading bundle: " + name + ", ext plugins: " + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() + () -> "Loading plugin bundle: " + + name + + ", ext plugins: " + + extendedPlugins.stream().map(lp -> lp.descriptor().getName()).toList() ); } - final ClassLoader parentLoader = ExtendedPluginsClassLoader.create( - getClass().getClassLoader(), - extendedPlugins.stream().map(LoadedPlugin::loader).toList() - ); - LayerAndLoader spiLayerAndLoader = null; - if 
(bundle.hasSPI()) { - spiLayerAndLoader = createSPI(bundle, parentLoader, extendedPlugins, qualifiedExports); - } - - final ClassLoader pluginParentLoader = spiLayerAndLoader == null ? parentLoader : spiLayerAndLoader.loader(); - final LayerAndLoader pluginLayerAndLoader = createPlugin( - bundle, - pluginParentLoader, - extendedPlugins, - spiLayerAndLoader, - qualifiedExports - ); - final ClassLoader pluginClassLoader = pluginLayerAndLoader.loader(); - - if (spiLayerAndLoader == null) { - // use full implementation for plugins extending this one - spiLayerAndLoader = pluginLayerAndLoader; - } + PluginBundle pluginBundle = pluginLayer.pluginBundle(); + ClassLoader pluginClassLoader = pluginLayer.pluginClassLoader(); // reload SPI with any new services from the plugin - reloadLuceneSPI(pluginClassLoader); + reloadLuceneSPI(pluginLayer.pluginClassLoader()); ClassLoader cl = Thread.currentThread().getContextClassLoader(); try { // Set context class loader to plugin's class loader so that plugins // that have dependencies with their own SPI endpoints have a chance to load // and initialize them appropriately. - privilegedSetContextClassLoader(pluginClassLoader); + privilegedSetContextClassLoader(pluginLayer.pluginClassLoader()); Plugin plugin; - if (bundle.pluginDescriptor().isStable()) { - stablePluginsRegistry.scanBundleForStablePlugins(bundle, pluginClassLoader); + if (pluginBundle.pluginDescriptor().isStable()) { + stablePluginsRegistry.scanBundleForStablePlugins(pluginBundle, pluginClassLoader); /* Contrary to old plugins we don't need an instance of the plugin here. Stable plugin register components (like CharFilterFactory) in stable plugin registry, which is then used in AnalysisModule @@ -514,16 +409,16 @@ Stable plugin register components (like CharFilterFactory) in stable plugin regi We need to pass a name though so that we can show that a plugin was loaded (via cluster state api) This might need to be revisited once support for settings is added */ - plugin = new StablePluginPlaceHolder(bundle.plugin.getName()); + plugin = new StablePluginPlaceHolder(pluginBundle.plugin.getName()); } else { - Class pluginClass = loadPluginClass(bundle.plugin.getClassname(), pluginClassLoader); + Class pluginClass = loadPluginClass(pluginBundle.plugin.getClassname(), pluginClassLoader); if (pluginClassLoader != pluginClass.getClassLoader()) { throw new IllegalStateException( "Plugin [" + name + "] must reference a class loader local Plugin class [" - + bundle.plugin.getClassname() + + pluginBundle.plugin.getClassname() + "] (class loader [" + pluginClass.getClassLoader() + "])" @@ -531,75 +426,12 @@ We need to pass a name though so that we can show that a plugin was loaded (via } plugin = loadPlugin(pluginClass, settings, configPath); } - loaded.put(name, new LoadedPlugin(bundle.plugin, plugin, spiLayerAndLoader.loader(), spiLayerAndLoader.layer())); + loadedPlugins.put(name, new LoadedPlugin(pluginBundle.plugin, plugin)); } finally { privilegedSetContextClassLoader(cl); } } - static LayerAndLoader createSPI( - PluginBundle bundle, - ClassLoader parentLoader, - List extendedPlugins, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", creating spi, modular"); - return createSpiModuleLayer( - bundle.spiUrls, - parentLoader, - extendedPlugins.stream().map(LoadedPlugin::layer).toList(), - qualifiedExports - ); - } else { - logger.debug(() -> "Loading bundle: " + 
plugin.getName() + ", creating spi, non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.spiUrls.toArray(new URL[0]), parentLoader)); - } - } - - static LayerAndLoader createPlugin( - PluginBundle bundle, - ClassLoader pluginParentLoader, - List extendedPlugins, - LayerAndLoader spiLayerAndLoader, - Map> qualifiedExports - ) { - final PluginDescriptor plugin = bundle.plugin; - if (plugin.getModuleName().isPresent()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", modular"); - var parentLayers = Stream.concat( - Stream.ofNullable(spiLayerAndLoader != null ? spiLayerAndLoader.layer() : null), - extendedPlugins.stream().map(LoadedPlugin::layer) - ).toList(); - return createPluginModuleLayer(bundle, pluginParentLoader, parentLayers, qualifiedExports); - } else if (plugin.isStable()) { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular as synthetic module"); - return LayerAndLoader.ofLoader( - UberModuleClassLoader.getInstance( - pluginParentLoader, - ModuleLayer.boot(), - "synthetic." + toModuleName(plugin.getName()), - bundle.allUrls, - Set.of("org.elasticsearch.server") // TODO: instead of denying server, allow only jvm + stable API modules - ) - ); - } else { - logger.debug(() -> "Loading bundle: " + plugin.getName() + ", non-modular"); - return LayerAndLoader.ofLoader(URLClassLoader.newInstance(bundle.urls.toArray(URL[]::new), pluginParentLoader)); - } - } - - // package-visible for testing - static String toModuleName(String name) { - String result = name.replaceAll("\\W+", ".") // replace non-alphanumeric character strings with dots - .replaceAll("(^[^A-Za-z_]*)", "") // trim non-alpha or underscore characters from start - .replaceAll("\\.$", "") // trim trailing dot - .toLowerCase(Locale.getDefault()); - assert ModuleSupport.isPackageName(result); - return result; - } - private static void checkDeprecations( PluginIntrospector inspector, List pluginDescriptors, @@ -706,173 +538,6 @@ public final Stream filterPlugins(Class type) { return plugins().stream().filter(x -> type.isAssignableFrom(x.instance().getClass())).map(p -> ((T) p.instance())); } - static LayerAndLoader createPluginModuleLayer( - PluginBundle bundle, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - bundle.plugin.getClassname(), - bundle.plugin.getModuleName().get(), - urlsToPaths(bundle.urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - static final LayerAndLoader createSpiModuleLayer( - Set urls, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - // assert bundle.plugin.getModuleName().isPresent(); - return createModuleLayer( - null, // no entry point - spiModuleName(urls), - urlsToPaths(urls), - parentLoader, - parentLayers, - qualifiedExports - ); - } - - private static final Module serverModule = PluginsService.class.getModule(); - - static LayerAndLoader createModuleLayer( - String className, - String moduleName, - Path[] paths, - ClassLoader parentLoader, - List parentLayers, - Map> qualifiedExports - ) { - logger.debug(() -> "Loading bundle: creating module layer and loader for module " + moduleName); - var finder = ModuleFinder.of(paths); - - var configuration = Configuration.resolveAndBind( - ModuleFinder.of(), - parentConfigurationOrBoot(parentLayers), - finder, - Set.of(moduleName) - ); - var controller = privilegedDefineModulesWithOneLoader(configuration, 
parentLayersOrBoot(parentLayers), parentLoader); - var pluginModule = controller.layer().findModule(moduleName).get(); - ensureEntryPointAccessible(controller, pluginModule, className); - // export/open upstream modules to this plugin module - exposeQualifiedExportsAndOpens(pluginModule, qualifiedExports); - // configure qualified exports/opens to other modules/plugins - addPluginExportsServices(qualifiedExports, controller); - logger.debug(() -> "Loading bundle: created module layer and loader for module " + moduleName); - return new LayerAndLoader(controller.layer(), privilegedFindLoader(controller.layer(), moduleName)); - } - - private static List parentLayersOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot()); - } else { - return parentLayers; - } - } - - private static List parentConfigurationOrBoot(List parentLayers) { - if (parentLayers == null || parentLayers.isEmpty()) { - return List.of(ModuleLayer.boot().configuration()); - } else { - return parentLayers.stream().map(ModuleLayer::configuration).toList(); - } - } - - /** Ensures that the plugins main class (its entry point), if any, is accessible to the server. */ - private static void ensureEntryPointAccessible(Controller controller, Module pluginModule, String className) { - if (className != null) { - controller.addOpens(pluginModule, toPackageName(className), serverModule); - } - } - - protected void addServerExportsService(Map> qualifiedExports) { - final Module serverModule = PluginsService.class.getModule(); - var exportsService = new ModuleQualifiedExportsService(serverModule) { - @Override - protected void addExports(String pkg, Module target) { - serverModule.addExports(pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - serverModule.addOpens(pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, serverModule.getName()); - } - - private static void addPluginExportsServices(Map> qualifiedExports, Controller controller) { - for (Module module : controller.layer().modules()) { - var exportsService = new ModuleQualifiedExportsService(module) { - @Override - protected void addExports(String pkg, Module target) { - controller.addExports(module, pkg, target); - } - - @Override - protected void addOpens(String pkg, Module target) { - controller.addOpens(module, pkg, target); - } - }; - addExportsService(qualifiedExports, exportsService, module.getName()); - } - } - - /** Determines the module name of the SPI module, given its URL. */ - static String spiModuleName(Set spiURLS) { - ModuleFinder finder = ModuleFinder.of(urlsToPaths(spiURLS)); - var mrefs = finder.findAll(); - assert mrefs.size() == 1 : "Expected a single module, got:" + mrefs; - return mrefs.stream().findFirst().get().descriptor().name(); - } - - /** - * Tuple of module layer and loader. - * Modular Plugins have a plugin specific loader and layer. - * Non-Modular plugins have a plugin specific loader and the boot layer. 
- */ - record LayerAndLoader(ModuleLayer layer, ClassLoader loader) { - - LayerAndLoader { - Objects.requireNonNull(layer); - Objects.requireNonNull(loader); - } - - static LayerAndLoader ofLoader(ClassLoader loader) { - return new LayerAndLoader(ModuleLayer.boot(), loader); - } - } - - @SuppressForbidden(reason = "I need to convert URL's to Paths") - static final Path[] urlsToPaths(Set urls) { - return urls.stream().map(PluginsService::uncheckedToURI).map(PathUtils::get).toArray(Path[]::new); - } - - static final URI uncheckedToURI(URL url) { - try { - return url.toURI(); - } catch (URISyntaxException e) { - throw new AssertionError(new IOException(e)); - } - } - - static final String toPackageName(String className) { - assert className.endsWith(".") == false; - int index = className.lastIndexOf('.'); - if (index == -1) { - throw new IllegalStateException("invalid class name:" + className); - } - return className.substring(0, index); - } - @SuppressWarnings("removal") private static void privilegedSetContextClassLoader(ClassLoader loader) { AccessController.doPrivileged((PrivilegedAction) () -> { @@ -880,16 +545,4 @@ private static void privilegedSetContextClassLoader(ClassLoader loader) { return null; }); } - - @SuppressWarnings("removal") - static Controller privilegedDefineModulesWithOneLoader(Configuration cf, List parentLayers, ClassLoader parentLoader) { - return AccessController.doPrivileged( - (PrivilegedAction) () -> ModuleLayer.defineModulesWithOneLoader(cf, parentLayers, parentLoader) - ); - } - - @SuppressWarnings("removal") - static ClassLoader privilegedFindLoader(ModuleLayer layer, String name) { - return AccessController.doPrivileged((PrivilegedAction) () -> layer.findLoader(name)); - } } diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java index 44fb531f8610e..155cff57a0ebf 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java +++ b/server/src/main/java/org/elasticsearch/plugins/PluginsUtils.java @@ -210,12 +210,12 @@ public static void checkForFailedPluginRemovals(final Path pluginsDirectory) thr } /** Get bundles for plugins installed in the given modules directory. */ - static Set getModuleBundles(Path modulesDirectory) throws IOException { + public static Set getModuleBundles(Path modulesDirectory) throws IOException { return findBundles(modulesDirectory, "module"); } /** Get bundles for plugins installed in the given plugins directory. 
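The toModuleName helper removed from PluginsService in the hunk above is kept, unchanged, in PluginsLoader; it derives the synthetic module name used for stable, non-modular plugins. Tracing its regex steps on a couple of example plugin names (the names are illustrative, and the call assumes same-package access since the method is package-visible):

// Each non-alphanumeric run becomes a dot, leading non-alpha/underscore characters
// and a trailing dot are trimmed, and the result is lower-cased.
String a = PluginsLoader.toModuleName("repository-azure"); // -> "repository.azure"
String b = PluginsLoader.toModuleName("x-pack-core");      // -> "x.pack.core"
// A stable plugin is then defined in a module named "synthetic." + toModuleName(name),
// e.g. "synthetic.repository.azure".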
*/ - static Set getPluginBundles(final Path pluginsDirectory) throws IOException { + public static Set getPluginBundles(final Path pluginsDirectory) throws IOException { return findBundles(pluginsDirectory, "plugin"); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index 3a210199065b7..2cd6e2b11ef7a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -127,7 +127,16 @@ public static Map createAttributesMap( OperationPurpose purpose, String operation ) { - return Map.of("repo_type", repositoryMetadata.type(), "operation", operation, "purpose", purpose.getKey()); + return Map.of( + "repo_type", + repositoryMetadata.type(), + "repo_name", + repositoryMetadata.name(), + "operation", + operation, + "purpose", + purpose.getKey() + ); } } diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index ae9ae6f8b5bf9..5f907572641a6 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -22,14 +22,27 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; import org.elasticsearch.env.Environment; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorImpact; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.BufferedInputStream; import java.io.IOException; import java.nio.file.Files; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.health.ImpactArea.DEPLOYMENT_MANAGEMENT; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; @@ -53,6 +66,7 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement public static final String NAMESPACE = "file_settings"; public static final String OPERATOR_DIRECTORY = "operator"; private final ReservedClusterStateService stateService; + private final FileSettingsHealthIndicatorService healthIndicatorService; /** * Constructs the {@link FileSettingsService} @@ -60,10 +74,21 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement * @param clusterService so we can register ourselves as a cluster state change listener * @param stateService an instance of the immutable cluster state controller, so we can perform the cluster state changes * @param environment we need the environment to pull 
the location of the config and operator directories + * @param healthIndicatorService tracks the success or failure of file-based settings */ - public FileSettingsService(ClusterService clusterService, ReservedClusterStateService stateService, Environment environment) { + public FileSettingsService( + ClusterService clusterService, + ReservedClusterStateService stateService, + Environment environment, + FileSettingsHealthIndicatorService healthIndicatorService + ) { super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; + this.healthIndicatorService = healthIndicatorService; + } + + public FileSettingsHealthIndicatorService healthIndicatorService() { + return healthIndicatorService; } /** @@ -121,6 +146,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_VERSION_ONLY); } @@ -131,6 +157,7 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept @Override protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_OR_SAME_VERSION); } @@ -146,6 +173,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I completion.get(); } + private void completeProcessing(Exception e, PlainActionFuture completion) { + if (e != null) { + healthIndicatorService.failureOccurred(e.toString()); + completion.onFailure(e); + } else { + completion.onResponse(null); + healthIndicatorService.successOccurred(); + } + } + @Override protected void onProcessFileChangesException(Exception e) { if (e instanceof ExecutionException) { @@ -172,11 +209,61 @@ protected void processInitialFileMissing() throws ExecutionException, Interrupte completion.get(); } - private static void completeProcessing(Exception e, PlainActionFuture completion) { - if (e != null) { - completion.onFailure(e); - } else { - completion.onResponse(null); + public static class FileSettingsHealthIndicatorService implements HealthIndicatorService { + static final String NAME = "file_settings"; + static final String NO_CHANGES_SYMPTOM = "No file-based setting changes have occurred"; + static final String SUCCESS_SYMPTOM = "The most recent file-based settings were applied successfully"; + static final String FAILURE_SYMPTOM = "The most recent file-based settings encountered an error"; + + static final List STALE_SETTINGS_IMPACT = List.of( + new HealthIndicatorImpact( + NAME, + "stale", + 3, + "The most recent file-based settings changes have not been applied.", + List.of(DEPLOYMENT_MANAGEMENT) + ) + ); + + private final AtomicLong changeCount = new AtomicLong(0); + private final AtomicLong failureStreak = new AtomicLong(0); + private final AtomicReference mostRecentFailure = new AtomicReference<>(); + + public void changeOccurred() { + changeCount.incrementAndGet(); + } + + public void successOccurred() { + failureStreak.set(0); + } + + public void failureOccurred(String description) { + failureStreak.incrementAndGet(); + mostRecentFailure.set(description); + } + + @Override + public String name() { + return NAME; + } + + @Override + 
public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + if (0 == changeCount.get()) { + return createIndicator(GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } + long numFailures = failureStreak.get(); + if (0 == numFailures) { + return createIndicator(GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } else { + return createIndicator( + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", numFailures, "most_recent_failure", mostRecentFailure.get())), + STALE_SETTINGS_IMPACT, + List.of() + ); + } } } } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index f1b59ed14cefb..4564a37dacf4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -122,6 +122,7 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl ); } + usageCount.increment(); if (request.isStreamedContent()) { assert action instanceof RequestBodyChunkConsumer; var chunkConsumer = (RequestBodyChunkConsumer) action; @@ -137,11 +138,11 @@ public void close() { chunkConsumer.streamClose(); } }); + action.accept(channel); + } else { + action.accept(channel); + request.getHttpRequest().release(); } - - usageCount.increment(); - // execute the action - action.accept(channel); } } diff --git a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java index cb5155cb0de0b..21a44ac9af5c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java @@ -43,11 +43,6 @@ public boolean canTripCircuitBreaker() { return delegate.canTripCircuitBreaker(); } - @Override - public boolean allowsUnsafeBuffers() { - return delegate.allowsUnsafeBuffers(); - } - @Override public boolean supportsBulkContent() { return delegate.supportsBulkContent(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 7446ec5bb6717..49fe794bbe615 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -432,10 +432,6 @@ private void dispatchRequest( } // iff we could reserve bytes for the request we need to send the response also over this channel responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, methodHandlers); - // TODO: Count requests double in the circuit breaker if they need copying? - if (handler.allowsUnsafeBuffers() == false) { - request.ensureSafeBuffers(); - } if (handler.allowSystemIndexAccessByDefault() == false) { // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product and diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index cf66e402d3691..572e92e369a63 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -69,18 +69,6 @@ default Scope getServerlessScope() { return serverlessScope == null ? 
null : serverlessScope.value(); } - /** - * Indicates if the RestHandler supports working with pooled buffers. If the request handler will not escape the return - * {@link RestRequest#content()} or any buffers extracted from it then there is no need to make a copies of any pooled buffers in the - * {@link RestRequest} instance before passing a request to this handler. If this instance does not support pooled/unsafe buffers - * {@link RestRequest#ensureSafeBuffers()} should be called on any request before passing it to {@link #handleRequest}. - * - * @return true iff the handler supports requests that make use of pooled buffers - */ - default boolean allowsUnsafeBuffers() { - return false; - } - /** * The list of {@link Route}s that this RestHandler is responsible for handling. */ diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 17eda305b5ccf..17d85a8eabb1c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -16,17 +16,21 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpBody; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; @@ -51,6 +55,8 @@ public class RestRequest implements ToXContent.Params, Traceable { + private static final Logger logger = LogManager.getLogger(RestRequest.class); + /** * Internal marker request parameter to indicate that a request was made in serverless mode. Use this parameter, together with * {@link #OPERATOR_REQUEST} if you need to toggle behavior for serverless, for example to enforce partial API restrictions @@ -187,15 +193,6 @@ protected RestRequest(RestRequest other) { } } - /** - * Invoke {@link HttpRequest#releaseAndCopy()} on the http request in this instance and replace a pooled http request - * with an unpooled copy. This is supposed to be used before passing requests to {@link RestHandler} instances that can not safely - * handle http requests that use pooled buffers as determined by {@link RestHandler#allowsUnsafeBuffers()}. - */ - void ensureSafeBuffers() { - httpRequest = httpRequest.releaseAndCopy(); - } - /** * Creates a new REST request. * @@ -306,9 +303,31 @@ public boolean isFullContent() { return httpRequest.body().isFull(); } + /** + * Returns a copy of HTTP content. The copy is GC-managed and does not require reference counting. + * Please use {@link #releasableContent()} to avoid content copy. 
+ */ + @SuppressForbidden(reason = "temporarily support content copy while migrating RestHandlers to ref counted pooled buffers") public BytesReference content() { + return BytesReference.copyBytes(releasableContent()); + } + + /** + * Returns a direct reference to the network buffer containing the request body. The HTTP layers will release their references to this + * buffer as soon as they have finished the synchronous steps of processing the request on the network thread, which will by default + * release the buffer back to the pool where it may be re-used for another request. If you need to keep the buffer alive past the end of + * these synchronous steps, acquire your own reference to this buffer and release it once it's no longer needed. + */ + public ReleasableBytesReference releasableContent() { this.contentConsumed = true; - return httpRequest.body().asFull().bytes(); + var bytes = httpRequest.body().asFull().bytes(); + if (bytes.hasReferences() == false) { + var e = new IllegalStateException("http releasable content accessed after release"); + logger.error(e.getMessage(), e); + assert false : e; + throw e; + } + return bytes; } public boolean isStreamedContent() { @@ -319,18 +338,32 @@ public HttpBody.Stream contentStream() { return httpRequest.body().asStream(); } - /** - * @return content of the request body or throw an exception if the body or content type is missing - */ - public final BytesReference requiredContent() { + private void ensureContent() { if (hasContent() == false) { throw new ElasticsearchParseException("request body is required"); } else if (xContentType.get() == null) { throwValidationException("unknown content type"); } + } + + /** + * @return copy of the request body or throw an exception if the body or content type is missing. + * See {@link #content()}. Please use {@link #requiredReleasableContent()} to avoid content copy. + */ + public final BytesReference requiredContent() { + ensureContent(); return content(); } + /** + * Returns reference to the network buffer of HTTP content or throw an exception if the body or content type is missing. + * See {@link #releasableContent()}. It's a recommended method to handle HTTP content without copying it. 
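// Editorial sketch (not part of this patch): the usage pattern the new releasable-content API
// implies, mirroring the RestBulkAction and RestIndexAction changes that follow. A handler that
// needs the request body beyond the synchronous dispatch step takes its own reference and
// releases it once the async action completes. MyAction, MyRequest and buildRequest() are
// hypothetical placeholders.
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
    ReleasableBytesReference body = request.requiredReleasableContent();
    MyRequest req = buildRequest(body, request.getXContentType());
    return channel -> {
        body.mustIncRef(); // keep the pooled network buffer alive for the async action
        client.execute(MyAction.INSTANCE, req,
            ActionListener.releaseAfter(new RestToXContentListener<>(channel), body));
    };
}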
+ */ + public ReleasableBytesReference requiredReleasableContent() { + ensureContent(); + return releasableContent(); + } + private static void throwValidationException(String msg) { ValidationException unknownContentType = new ValidationException(); unknownContentType.addValidationError(msg); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java index e4105363e1bce..57b4d2990c8e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Tuple; @@ -44,10 +45,10 @@ public boolean hasContent() { } @Override - public BytesReference content() { + public ReleasableBytesReference releasableContent() { if (filteredBytes == null) { Tuple> result = XContentHelper.convertToMap( - restRequest.requiredContent(), + restRequest.requiredReleasableContent(), true, restRequest.getXContentType() ); @@ -63,7 +64,7 @@ public BytesReference content() { throw new ElasticsearchException("failed to parse request", e); } } - return filteredBytes; + return ReleasableBytesReference.wrap(filteredBytes); } }; } else { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index de3fd390ec86d..9428ef5390b2f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestParser; @@ -102,9 +103,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); + ReleasableBytesReference content = request.requiredReleasableContent(); + try { bulkRequest.add( - request.requiredContent(), + content, defaultIndex, defaultRouting, defaultFetchSourceContext, @@ -119,8 +122,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } catch (Exception e) { return channel -> new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); } - - return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); + return channel -> { + content.mustIncRef(); + client.bulk(bulkRequest, ActionListener.releaseAfter(new RestRefCountedChunkedToXContentListener<>(channel), content)); + }; } else { String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); @@ -270,11 +275,6 @@ public boolean supportsBulkContent() 
{ return true; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public Set supportedCapabilities() { return capabilities; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index c2437dcb96fa6..d81ac03492d59 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -9,12 +9,14 @@ package org.elasticsearch.rest.action.document; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -104,11 +106,12 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + ReleasableBytesReference source = request.requiredReleasableContent(); IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.setPipeline(request.param("pipeline")); - indexRequest.source(request.requiredContent(), request.getXContentType()); + indexRequest.source(source, request.getXContentType()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.setRefreshPolicy(request.param("refresh")); indexRequest.version(RestActions.parseVersion(request)); @@ -126,10 +129,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indexRequest.opType(sOpType); } - return channel -> client.index( - indexRequest, - new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) - ); + return channel -> { + source.mustIncRef(); + client.index( + indexRequest, + ActionListener.releaseAfter( + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())), + source + ) + ); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 80a85d3b9b748..ff062084a3cbb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -423,8 +423,4 @@ protected Set responseParams() { return RESPONSE_PARAMS; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java index 241f30b367782..e5c4826bfce97 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/SearchCapabilities.java @@ -40,6 +40,8 @@ private 
SearchCapabilities() {} private static final String NESTED_RETRIEVER_INNER_HITS_SUPPORT = "nested_retriever_inner_hits_support"; /** Support multi-dense-vector script field access. */ private static final String MULTI_DENSE_VECTOR_SCRIPT_ACCESS = "multi_dense_vector_script_access"; + /** Initial support for multi-dense-vector maxSim functions access. */ + private static final String MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM = "multi_dense_vector_script_max_sim"; private static final String RANDOM_SAMPLER_WITH_SCORED_SUBAGGS = "random_sampler_with_scored_subaggs"; @@ -56,6 +58,7 @@ private SearchCapabilities() {} if (MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()) { capabilities.add(MULTI_DENSE_VECTOR_FIELD_MAPPER); capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_ACCESS); + capabilities.add(MULTI_DENSE_VECTOR_SCRIPT_MAX_SIM); } if (Build.current().isSnapshot()) { capabilities.add(KQL_QUERY_SUPPORTED); diff --git a/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java new file mode 100644 index 0000000000000..136c5e7b57d4b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/MultiVectorScoreScriptUtils.java @@ -0,0 +1,372 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper; +import org.elasticsearch.script.field.vectors.DenseVector; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; + +import java.io.IOException; +import java.util.HexFormat; +import java.util.List; + +public class MultiVectorScoreScriptUtils { + + public static class MultiDenseVectorFunction { + protected final ScoreScript scoreScript; + protected final MultiDenseVectorDocValuesField field; + + public MultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field) { + this.scoreScript = scoreScript; + this.field = field; + } + + void setNextVector() { + try { + field.setNextDocId(scoreScript._getDocId()); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } + if (field.isEmpty()) { + throw new IllegalArgumentException("A document doesn't have a value for a multi-vector field!"); + } + } + } + + public static class ByteMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final byte[][] queryVector; + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. 
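// Editorial sketch (not part of this patch): the MaxSim scoring shape these script utilities
// implement. For each query vector, take the maximum similarity against any of the document's
// vectors, then sum those per-query maxima. dot() is a plain stand-in for VectorUtil.dotProduct.
static float maxSimDotProduct(float[][] queryVectors, float[][] docVectors) {
    float sum = 0f;
    for (float[] q : queryVectors) {
        float best = Float.NEGATIVE_INFINITY;
        for (float[] d : docVectors) {
            best = Math.max(best, dot(q, d));
        }
        sum += best;
    }
    return sum;
}

static float dot(float[] a, float[] b) {
    float s = 0f;
    for (int i = 0; i < a.length; i++) {
        s += a[i] * b[i];
    }
    return s;
}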
+ */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + field.getElementType().checkDimensions(field.get().getDims(), queryVector.get(0).size()); + this.queryVector = new byte[queryVector.size()][queryVector.get(0).size()]; + float[] validateValues = new float[queryVector.size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." + ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + final Number number = queryVector.get(i).get(j); + byte value = number.byteValue(); + this.queryVector[i][j] = value; + validateValues[i] = number.floatValue(); + } + field.getElementType().checkVectorBounds(validateValues); + } + } + + /** + * Constructs a dense vector function used for byte-sized vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public ByteMultiDenseVectorFunction(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + this.queryVector = queryVector; + } + } + + public static class FloatMultiDenseVectorFunction extends MultiDenseVectorFunction { + protected final float[][] queryVector; + + /** + * Constructs a dense vector function used for float vectors. + * + * @param scoreScript The script in which this function was referenced. + * @param field The vector field. + * @param queryVector The query vector. + */ + public FloatMultiDenseVectorFunction( + ScoreScript scoreScript, + MultiDenseVectorDocValuesField field, + List> queryVector + ) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + DenseVector.checkDimensions(field.get().getDims(), queryVector.get(0).size()); + + this.queryVector = new float[queryVector.size()][queryVector.get(0).size()]; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + for (int j = 0; j < queryVector.get(i).size(); j++) { + this.queryVector[i][j] = queryVector.get(i).get(j).floatValue(); + } + field.getElementType().checkVectorBounds(this.queryVector[i]); + } + } + } + + // Calculate Hamming distances between a query's dense vector and documents' dense vectors + public interface MaxSimInvHammingDistanceInterface { + float maxSimInvHamming(); + } + + public static class ByteMaxSimInvHammingDistance extends ByteMultiDenseVectorFunction implements MaxSimInvHammingDistanceInterface { + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public ByteMaxSimInvHammingDistance(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public float maxSimInvHamming() { + setNextVector(); + return field.get().maxSimInvHamming(queryVector); + } + } + + private record BytesOrList(byte[][] bytes, List> list) {} + + @SuppressWarnings("unchecked") + private static BytesOrList parseBytes(Object queryVector) { + if (queryVector instanceof List) { + // check if its a list of strings or list of lists + if (((List) queryVector).get(0) instanceof List) { + return new BytesOrList(null, ((List>) queryVector)); + } else if (((List) queryVector).get(0) instanceof String) { + byte[][] parsedQueryVector = new byte[((List) queryVector).size()][]; + int lastSize = -1; + for (int i = 0; i < ((List) queryVector).size(); i++) { + parsedQueryVector[i] = HexFormat.of().parseHex((String) ((List) queryVector).get(i)); + if (lastSize != -1 && lastSize != parsedQueryVector[i].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = parsedQueryVector[i].length; + } + return new BytesOrList(parsedQueryVector, null); + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } else { + throw new IllegalArgumentException("Unsupported input object for byte vectors: " + queryVector.getClass().getName()); + } + } + + public static final class MaxSimInvHamming { + + private final MaxSimInvHammingDistanceInterface function; + + public MaxSimInvHamming(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + if (field.getElementType() == DenseVectorFieldMapper.ElementType.FLOAT) { + throw new IllegalArgumentException("hamming distance is only supported for byte or bit vectors"); + } + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.bytes); + } else { + this.function = new ByteMaxSimInvHammingDistance(scoreScript, field, bytesOrList.list); + } + } + + public double maxSimInvHamming() { + return function.maxSimInvHamming(); + } + } + + // Calculate a dot product between a query's dense vector and documents' dense vectors + public interface MaxSimDotProductInterface { + double maxSimDotProduct(); + } + + public static class MaxSimBitDotProduct extends MultiDenseVectorFunction implements MaxSimDotProductInterface { + private final byte[][] byteQueryVector; + private final float[][] floatQueryVector; + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field); + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("Cannot calculate bit dot product for non-bit vectors"); + } + int fieldDims = field.get().getDims(); + if (fieldDims != queryVector.length * Byte.SIZE && fieldDims != queryVector.length) { + throw new IllegalArgumentException( + "The query vector has an incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + queryVector.length + + "]." + ); + } + this.byteQueryVector = queryVector; + this.floatQueryVector = null; + } + + public MaxSimBitDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field); + if (queryVector.isEmpty()) { + throw new IllegalArgumentException("The query vector is empty."); + } + if (field.getElementType() != DenseVectorFieldMapper.ElementType.BIT) { + throw new IllegalArgumentException("cannot calculate bit dot product for non-bit vectors"); + } + float[][] floatQueryVector = new float[queryVector.size()][]; + byte[][] byteQueryVector = new byte[queryVector.size()][]; + boolean isFloat = false; + int lastSize = -1; + for (int i = 0; i < queryVector.size(); i++) { + if (lastSize != -1 && lastSize != queryVector.get(i).size()) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have inconsistent number of dimensions." 
+ ); + } + lastSize = queryVector.get(i).size(); + floatQueryVector[i] = new float[queryVector.get(i).size()]; + if (isFloat == false) { + byteQueryVector[i] = new byte[queryVector.get(i).size()]; + } + for (int j = 0; j < queryVector.get(i).size(); j++) { + Number number = queryVector.get(i).get(j); + floatQueryVector[i][j] = number.floatValue(); + if (isFloat == false) { + byteQueryVector[i][j] = number.byteValue(); + } + if (isFloat + || floatQueryVector[i][j] % 1.0f != 0.0f + || floatQueryVector[i][j] < Byte.MIN_VALUE + || floatQueryVector[i][j] > Byte.MAX_VALUE) { + isFloat = true; + } + } + } + int fieldDims = field.get().getDims(); + if (isFloat) { + this.floatQueryVector = floatQueryVector; + this.byteQueryVector = null; + if (fieldDims != floatQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims + + "] for float wise operations: provided [" + + floatQueryVector[0].length + + "]." + ); + } + } else { + this.floatQueryVector = null; + this.byteQueryVector = byteQueryVector; + if (fieldDims != byteQueryVector[0].length * Byte.SIZE && fieldDims != byteQueryVector[0].length) { + throw new IllegalArgumentException( + "The query vector contains inner vectors which have incorrect number of dimensions. Must be [" + + fieldDims / 8 + + "] for bitwise operations, or [" + + fieldDims + + "] for byte wise operations: provided [" + + byteQueryVector[0].length + + "]." + ); + } + } + } + + @Override + public double maxSimDotProduct() { + setNextVector(); + return byteQueryVector != null ? field.get().maxSimDotProduct(byteQueryVector) : field.get().maxSimDotProduct(floatQueryVector); + } + } + + public static class MaxSimByteDotProduct extends ByteMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public MaxSimByteDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, byte[][] queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static class MaxSimFloatDotProduct extends FloatMultiDenseVectorFunction implements MaxSimDotProductInterface { + + public MaxSimFloatDotProduct(ScoreScript scoreScript, MultiDenseVectorDocValuesField field, List> queryVector) { + super(scoreScript, field, queryVector); + } + + public double maxSimDotProduct() { + setNextVector(); + return field.get().maxSimDotProduct(queryVector); + } + } + + public static final class MaxSimDotProduct { + + private final MaxSimDotProductInterface function; + + @SuppressWarnings("unchecked") + public MaxSimDotProduct(ScoreScript scoreScript, Object queryVector, String fieldName) { + MultiDenseVectorDocValuesField field = (MultiDenseVectorDocValuesField) scoreScript.field(fieldName); + function = switch (field.getElementType()) { + case BIT -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new MaxSimBitDotProduct(scoreScript, field, bytesOrList.list); + } + } + case BYTE -> { + BytesOrList bytesOrList = parseBytes(queryVector); + if (bytesOrList.bytes != null) { + yield new MaxSimByteDotProduct(scoreScript, field, bytesOrList.bytes); + } else { + yield new 
MaxSimByteDotProduct(scoreScript, field, bytesOrList.list); + } + } + case FLOAT -> { + if (queryVector instanceof List) { + yield new MaxSimFloatDotProduct(scoreScript, field, (List>) queryVector); + } + throw new IllegalArgumentException("Unsupported input object for float vectors: " + queryVector.getClass().getName()); + } + }; + } + + public double maxSimDotProduct() { + return function.maxSimDotProduct(); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java index 24e19a803ff38..7805816090d51 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/BitMultiDenseVector.java @@ -10,11 +10,13 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.simdvec.ESVectorUtil; -import java.util.Iterator; +import java.util.Arrays; public class BitMultiDenseVector extends ByteMultiDenseVector { - public BitMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public BitMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { super(vectorValues, magnitudesBytes, numVecs, dims); } @@ -31,6 +33,70 @@ public void checkDimensions(int qvDims) { } } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipFloatBit(query[i], vv)); + } + } + float sums = 0; + for (float m : maxes) { + sums += m; + } + return sums; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + if (query[0].length == dims) { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.andBitCount(query[i], vv)); + } + } + } else { + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ESVectorUtil.ipByteBit(query[i], vv)); + } + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = this.getDims(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + @Override public int getDims() { return dims * Byte.SIZE; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java index e610d10146b2f..5e9d3e05746c8 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java +++ 
b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVector.java @@ -10,21 +10,22 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; import org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder; +import java.util.Arrays; import java.util.Iterator; public class ByteMultiDenseVector implements MultiDenseVector { - protected final Iterator vectorValues; + protected final VectorIterator vectorValues; protected final int numVecs; protected final int dims; - private Iterator floatDocVectors; private float[] magnitudes; private final BytesRef magnitudesBytes; - public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { + public ByteMultiDenseVector(VectorIterator vectorValues, BytesRef magnitudesBytes, int numVecs, int dims) { assert magnitudesBytes.length == numVecs * Float.BYTES; this.vectorValues = vectorValues; this.numVecs = numVecs; @@ -33,11 +34,50 @@ public ByteMultiDenseVector(Iterator vectorValues, BytesRef magnitudesBy } @Override - public Iterator getVectors() { - if (floatDocVectors == null) { - floatDocVectors = new ByteToFloatIteratorWrapper(vectorValues, dims); + public float maxSimDotProduct(float[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimInvHamming(byte[][] query) { + vectorValues.reset(); + int bitCount = dims * Byte.SIZE; + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + byte[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], ((bitCount - VectorUtil.xorBitCount(vv, query[i])) / (float) bitCount)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; } - return floatDocVectors; + return sum; + } + + @Override + public Iterator getVectors() { + return new ByteToFloatIteratorWrapper(vectorValues.copy(), dims); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java index d1e062e0a3dee..d45c5b85137f5 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteMultiDenseVectorDocValuesField.java @@ -23,7 +23,7 @@ public class ByteMultiDenseVectorDocValuesField extends MultiDenseVectorDocValue private final BinaryDocValues magnitudes; protected final int dims; protected int numVecs; - protected Iterator vectorValue; + protected VectorIterator vectorValue; protected boolean decoded; protected BytesRef value; protected BytesRef magnitudesValue; @@ -111,7 +111,7 @@ public boolean isEmpty() { return value == null; } - static class ByteVectorIterator implements Iterator { + static class ByteVectorIterator implements VectorIterator { private final byte[] buffer; 
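// Editorial sketch (not part of this patch): why the VectorIterator abstraction introduced in
// this patch adds reset() and copy(). reset() lets the maxSim loops rescan a document's vectors
// on every scoring call without reallocating, while copy() hands getVectors() callers an
// independent iterator that does not disturb the shared position. Assumes the interface is
// generic over the vector type, as the diamond operators suggest; consume() is a placeholder.
VectorIterator<byte[]> it = VectorIterator.from(new byte[][] { { 1, 2 }, { 3, 4 } });
while (it.hasNext()) {
    consume(it.next());                       // first pass, e.g. one maxSim scan
}
it.reset();                                   // rewind in place for the next scoring call
Iterator<byte[]> independent = it.copy();     // fresh iterator; original position untouched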
private final BytesRef vectorValues; private final int size; @@ -138,5 +138,15 @@ public byte[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new ByteVectorIterator(vectorValues, new byte[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java index 9ffe8b3b970c4..9c2f7eb6a86d4 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVector.java @@ -10,7 +10,9 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import java.util.Arrays; import java.util.Iterator; import static org.elasticsearch.index.mapper.vectors.VectorEncoderDecoder.getMultiMagnitudes; @@ -21,19 +23,47 @@ public class FloatMultiDenseVector implements MultiDenseVector { private float[] magnitudesArray = null; private final int dims; private final int numVectors; - private final Iterator decodedDocVector; + private final VectorIterator vectorValues; - public FloatMultiDenseVector(Iterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { + public FloatMultiDenseVector(VectorIterator decodedDocVector, BytesRef magnitudes, int numVectors, int dims) { assert magnitudes.length == numVectors * Float.BYTES; - this.decodedDocVector = decodedDocVector; + this.vectorValues = decodedDocVector; this.magnitudes = magnitudes; this.numVectors = numVectors; this.dims = dims; } + @Override + public float maxSimDotProduct(float[][] query) { + vectorValues.reset(); + float[] maxes = new float[query.length]; + Arrays.fill(maxes, Float.NEGATIVE_INFINITY); + while (vectorValues.hasNext()) { + float[] vv = vectorValues.next(); + for (int i = 0; i < query.length; i++) { + maxes[i] = Math.max(maxes[i], VectorUtil.dotProduct(query[i], vv)); + } + } + float sum = 0; + for (float m : maxes) { + sum += m; + } + return sum; + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new UnsupportedOperationException("use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new UnsupportedOperationException("hamming distance is not supported for float vectors"); + } + @Override public Iterator getVectors() { - return decodedDocVector; + return vectorValues.copy(); } @Override diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java index 356db58d989c5..c7ac7842afd96 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/FloatMultiDenseVectorDocValuesField.java @@ -110,14 +110,16 @@ private void decodeVectorIfNecessary() { } } - static class FloatVectorIterator implements Iterator { + static class FloatVectorIterator implements VectorIterator { private final float[] buffer; private final FloatBuffer vectorValues; + private final BytesRef vectorValueBytesRef; private final int size; private int idx = 0; FloatVectorIterator(BytesRef vectorValues, float[] buffer, int size) { assert vectorValues.length == 
(buffer.length * Float.BYTES * size); + this.vectorValueBytesRef = vectorValues; this.vectorValues = ByteBuffer.wrap(vectorValues.bytes, vectorValues.offset, vectorValues.length) .order(ByteOrder.LITTLE_ENDIAN) .asFloatBuffer(); @@ -139,5 +141,16 @@ public float[] next() { idx++; return buffer; } + + @Override + public Iterator copy() { + return new FloatVectorIterator(vectorValueBytesRef, new float[buffer.length], size); + } + + @Override + public void reset() { + idx = 0; + vectorValues.rewind(); + } } } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java index 85c851dbe545c..7d948cf5a74fa 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/MultiDenseVector.java @@ -17,6 +17,12 @@ default void checkDimensions(int qvDims) { checkDimensions(getDims(), qvDims); } + float maxSimDotProduct(float[][] query); + + float maxSimDotProduct(byte[][] query); + + float maxSimInvHamming(byte[][] query); + Iterator getVectors(); float[] getMagnitudes(); @@ -63,6 +69,21 @@ public int getDims() { throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); } + @Override + public float maxSimDotProduct(float[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimDotProduct(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float maxSimInvHamming(byte[][] query) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + @Override public int size() { return 0; diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java new file mode 100644 index 0000000000000..b8615ac877254 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/VectorIterator.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.script.field.vectors; + +import java.util.Iterator; + +public interface VectorIterator extends Iterator { + Iterator copy(); + + void reset(); + + static VectorIterator from(float[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public float[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } + + static VectorIterator from(byte[][] vectors) { + return new VectorIterator<>() { + private int i = 0; + + @Override + public boolean hasNext() { + return i < vectors.length; + } + + @Override + public byte[] next() { + return vectors[i++]; + } + + @Override + public Iterator copy() { + return from(vectors); + } + + @Override + public void reset() { + i = 0; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index 1ea7769b33384..11444edca080d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -13,6 +13,8 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.elasticsearch.common.CheckedIntFunction; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.Maps; @@ -48,6 +50,8 @@ public abstract class AggregatorBase extends Aggregator { private Map subAggregatorbyName; private long requestBytesUsed; + private final CircuitBreaker breaker; + private int callCount; /** * Constructs a new Aggregator. @@ -72,6 +76,7 @@ protected AggregatorBase( this.metadata = metadata; this.parent = parent; this.context = context; + this.breaker = context.breaker(); assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(this, subAggregatorCardinality); context.addReleasable(this); @@ -327,6 +332,30 @@ protected final InternalAggregations buildEmptySubAggregations() { return InternalAggregations.from(aggs); } + /** + * Builds the aggregations array with the provided size and populates it using the provided function. + */ + protected final InternalAggregation[] buildAggregations(int size, CheckedIntFunction aggFunction) + throws IOException { + final InternalAggregation[] results = new InternalAggregation[size]; + for (int i = 0; i < results.length; i++) { + checkRealMemoryCB("internal_aggregation"); + results[i] = aggFunction.apply(i); + } + return results; + } + + /** + * This method calls the circuit breaker from time to time in order to give it a chance to check available + * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. 
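// Editorial note (not part of this patch): the sampling arithmetic used below. 0x3FF is 1023,
// so (++callCount & 0x3FF) == 0 holds exactly once every 1024 calls, and passing an estimate of
// 0 bytes adds nothing to the breaker's accounting but still lets the parent real-memory breaker
// re-check current heap usage and trip the request if memory is exhausted.
assert (1024 & 0x3FF) == 0 && (1023 & 0x3FF) == 1023;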
+ * To achieve that, we are passing 0 as the estimated bytes every 1024 calls + */ + protected final void checkRealMemoryCB(String label) { + if ((++callCount & 0x3FF) == 0) { + breaker.addEstimateBytesAndMaybeBreak(0, label); + } + } + @Override public String toString() { return name; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java index 4da2d10cfc0c2..a32211fd4d8fb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/NonCollectingAggregator.java @@ -41,10 +41,6 @@ public final LeafBucketCollector getLeafCollector(AggregationExecutionContext ag @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildEmptyAggregation(); - } - return results; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> buildEmptyAggregation()); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 252eb0877d024..ea667b821a7dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -9,7 +9,6 @@ package org.elasticsearch.search.aggregations.bucket; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.ObjectArray; @@ -42,10 +41,9 @@ import java.util.function.ToLongFunction; public abstract class BucketsAggregator extends AggregatorBase { - private final CircuitBreaker breaker; + private LongArray docCounts; protected final DocCountProvider docCountProvider; - private int callCount; @SuppressWarnings("this-escape") public BucketsAggregator( @@ -57,7 +55,6 @@ public BucketsAggregator( Map metadata ) throws IOException { super(name, factories, aggCtx, parent, bucketCardinality, metadata); - breaker = aggCtx.breaker(); docCounts = bigArrays().newLongArray(1, true); docCountProvider = new DocCountProvider(); } @@ -83,7 +80,7 @@ public final void collectBucket(LeafBucketCollector subCollector, int doc, long grow(bucketOrd + 1); int docCount = docCountProvider.getDocCount(doc); if (docCounts.increment(bucketOrd, docCount) == docCount) { - updateCircuitBreaker("allocated_buckets"); + checkRealMemoryCB("allocated_buckets"); } subCollector.collect(doc, bucketOrd); } @@ -176,7 +173,7 @@ protected final IntFunction buildSubAggsForBuckets(LongArr prepareSubAggs(bucketOrdsToCollect); InternalAggregation[][] aggregations = new InternalAggregation[subAggregators.length][]; for (int i = 0; i < subAggregators.length; i++) { - updateCircuitBreaker("building_sub_aggregation"); + checkRealMemoryCB("building_sub_aggregation"); aggregations[i] = subAggregators[i].buildAggregations(bucketOrdsToCollect); } return subAggsForBucketFunction(aggregations); @@ -247,31 +244,30 @@ protected final InternalAggregation[] buildAggregationsForFixedBucketCount( Function, 
InternalAggregation> resultBuilder ) throws IOException { try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(owningBucketOrds.size() * bucketsPerOwningBucketOrd)) { - int bucketOrdIdx = 0; + final int[] bucketOrdIdx = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { long ord = owningBucketOrds.get(i) * bucketsPerOwningBucketOrd; for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { - bucketOrdsToCollect.set(bucketOrdIdx++, ord++); + bucketOrdsToCollect.set(bucketOrdIdx[0]++, ord++); } } - bucketOrdIdx = 0; + bucketOrdIdx[0] = 0; var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int owningOrdIdx = 0; owningOrdIdx < results.length; owningOrdIdx++) { + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { List buckets = new ArrayList<>(bucketsPerOwningBucketOrd); for (int offsetInOwningOrd = 0; offsetInOwningOrd < bucketsPerOwningBucketOrd; offsetInOwningOrd++) { + checkRealMemoryCBForInternalBucket(); buckets.add( bucketBuilder.build( offsetInOwningOrd, - bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx)), - subAggregationResults.apply(bucketOrdIdx++) + bucketDocCount(bucketOrdsToCollect.get(bucketOrdIdx[0])), + subAggregationResults.apply(bucketOrdIdx[0]++) ) ); } - results[owningOrdIdx] = resultBuilder.apply(buckets); - } - return results; + return resultBuilder.apply(buckets); + }); } } @@ -295,11 +291,10 @@ protected final InternalAggregation[] buildAggregationsForSingleBucket( * here but we don't because single bucket aggs never have. */ var subAggregationResults = buildSubAggsForBuckets(owningBucketOrds); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)); - } - return results; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> resultBuilder.build(owningBucketOrds.get(ordIdx), subAggregationResults.apply(ordIdx)) + ); } @FunctionalInterface @@ -335,37 +330,36 @@ protected final InternalAggregation[] buildAggregationsForVariableBuckets( ); } try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { - int b = 0; + final int[] b = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); while (ordsEnum.next()) { - bucketOrdsToCollect.set(b++, ordsEnum.ord()); + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); } } var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - b = 0; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { final long owningBucketOrd = owningBucketOrds.get(ordIdx); List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrd); while (ordsEnum.next()) { - if (bucketOrdsToCollect.get(b) != ordsEnum.ord()) { + if (bucketOrdsToCollect.get(b[0]) != ordsEnum.ord()) { // If we hit this, something has gone horribly wrong and we need to investigate throw 
AggregationErrors.iterationOrderChangedWithoutMutating( bucketOrds.toString(), ordsEnum.ord(), - bucketOrdsToCollect.get(b) + bucketOrdsToCollect.get(b[0]) ); } + checkRealMemoryCBForInternalBucket(); buckets.add( - bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b++)) + bucketBuilder.build(ordsEnum.value(), bucketDocCount(ordsEnum.ord()), subAggregationResults.apply(b[0]++)) ); } - results[ordIdx] = resultBuilder.build(owningBucketOrd, buckets); - } - return results; + return resultBuilder.build(owningBucketOrd, buckets); + }); } } } @@ -425,14 +419,9 @@ protected void preGetSubLeafCollectors(LeafReaderContext ctx) throws IOException docCountProvider.setLeafReaderContext(ctx); } - /** - * This method calls the circuit breaker from time to time in order to give it a chance to check available - * memory in the parent breaker (Which should be a real memory breaker) and break the execution if we are running out. - * To achieve that, we are passing 0 as the estimated bytes every 1024 calls - */ - private void updateCircuitBreaker(String label) { - if ((++callCount & 0x3FF) == 0) { - breaker.addEstimateBytesAndMaybeBreak(0, label); - } + /** This method should be called whenever a new bucket object is created. It will check the real memory + * circuit breaker in a sampling fashion. See {@link #checkRealMemoryCB(String)} */ + protected final void checkRealMemoryCBForInternalBucket() { + checkRealMemoryCB("internal_bucket"); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java index 87ebec525a6fa..d39e90b44579e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java @@ -12,7 +12,6 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.HasAggregations; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.xcontent.ToXContent; import java.util.List; @@ -24,7 +23,7 @@ public interface MultiBucketsAggregation extends Aggregation { * A bucket represents a criteria to which all documents that fall in it adhere to. It is also uniquely identified * by a key, and can potentially hold sub-aggregations computed over all documents in it. 
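// Editorial sketch (not part of this patch): the per-ordinal pattern aggregators move to in this
// patch, as in NonCollectingAggregator above. Routing the loop through the new
// buildAggregations(size, function) helper lets AggregatorBase sample the real-memory circuit
// breaker once per 1024 results instead of each aggregator rolling its own loop.
// buildResultForOrdinal() is a hypothetical per-bucket builder.
@Override
public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException {
    return buildAggregations(
        Math.toIntExact(owningBucketOrds.size()),
        ordIdx -> buildResultForOrdinal(owningBucketOrds.get(ordIdx))
    );
}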
*/ - interface Bucket extends HasAggregations, ToXContent { + interface Bucket extends HasAggregations { /** * @return The key associated with the bucket */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 30c45ba46d9b7..8b3253418bc23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -465,14 +465,6 @@ public int compareKey(InternalBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - /** - * See {@link CompositeAggregation#bucketToXContent} - */ - throw new UnsupportedOperationException("not implemented"); - } - InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( sourceNames, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index 05fce2cff64d5..344b90b06c4f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -140,6 +140,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(ordsEnum.ord()); otherDocCounts.increment(ordIdx, docCount); if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } ordsEnum.readValue(spare.getTermBytes()); @@ -158,8 +159,8 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } buildSubAggsForAllBuckets(topBucketsPerOrd, InternalTerms.Bucket::getBucketOrd, InternalTerms.Bucket::setAggregations); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { final BucketOrder reduceOrder; if (isKeyOrder(order) == false) { reduceOrder = InternalOrder.key(true); @@ -167,7 +168,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } else { reduceOrder = order; } - result[ordIdx] = new StringTerms( + return new StringTerms( name, reduceOrder, order, @@ -181,8 +182,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw Arrays.asList(topBucketsPerOrd.get(ordIdx)), null ); - } - return result; + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index 69eff3630a8f4..a9ec0ba878ec0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -215,15 +215,9 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw filters.size() + (otherBucketKey == null ? 
0 : 1), (offsetInOwningOrd, docCount, subAggregationResults) -> { if (offsetInOwningOrd < filters.size()) { - return new InternalFilters.InternalBucket( - filters.get(offsetInOwningOrd).key(), - docCount, - subAggregationResults, - keyed, - keyedBucket - ); + return new InternalFilters.InternalBucket(filters.get(offsetInOwningOrd).key(), docCount, subAggregationResults); } - return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults, keyed, keyedBucket); + return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults); }, buckets -> new InternalFilters(name, buckets, keyed, keyedBucket, metadata()) ); @@ -234,12 +228,12 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(filters.size() + (otherBucketKey == null ? 0 : 1)); for (QueryToFilterAdapter filter : filters) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs); buckets.add(bucket); } if (otherBucketKey != null) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs); buckets.add(bucket); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index a5dfb0d8efafa..c05759582346a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -32,26 +32,20 @@ public class InternalFilters extends InternalMultiBucketAggregation implements Filters { public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Filters.Bucket { - private final boolean keyed; - private final boolean keyedBucket; private final String key; private long docCount; InternalAggregations aggregations; - public InternalBucket(String key, long docCount, InternalAggregations aggregations, boolean keyed, boolean keyedBucket) { + public InternalBucket(String key, long docCount, InternalAggregations aggregations) { this.key = key; - this.keyedBucket = keyedBucket; this.docCount = docCount; this.aggregations = aggregations; - this.keyed = keyed; } /** * Read from a stream. 
*/ - public InternalBucket(StreamInput in, boolean keyed, boolean keyedBucket) throws IOException { - this.keyed = keyed; - this.keyedBucket = keyedBucket; + public InternalBucket(StreamInput in) throws IOException { key = in.readOptionalString(); docCount = in.readVLong(); aggregations = InternalAggregations.readFrom(in); @@ -84,8 +78,7 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed, boolean keyedBucket) throws IOException { if (keyed && keyedBucket) { builder.startObject(key); } else { @@ -97,7 +90,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -110,24 +102,20 @@ public boolean equals(Object other) { } InternalBucket that = (InternalBucket) other; return Objects.equals(key, that.key) - && Objects.equals(keyed, that.keyed) - && Objects.equals(keyedBucket, that.keyedBucket) && Objects.equals(docCount, that.docCount) && Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { - return Objects.hash(getClass(), key, keyed, keyedBucket, docCount, aggregations); + return Objects.hash(getClass(), key, docCount, aggregations); } InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( key, samplingContext.scaleUp(docCount), - InternalAggregations.finalizeSampling(aggregations, samplingContext), - keyed, - keyedBucket + InternalAggregations.finalizeSampling(aggregations, samplingContext) ); } } @@ -155,7 +143,7 @@ public InternalFilters(StreamInput in) throws IOException { int size = in.readVInt(); List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalBucket(in, keyed, keyedBucket)); + buckets.add(new InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -182,7 +170,7 @@ public InternalFilters create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed, keyedBucket); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } @Override @@ -211,7 +199,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont ) { @Override protected InternalBucket createBucket(InternalBucket proto, long docCount, InternalAggregations aggregations) { - return new InternalBucket(proto.key, docCount, aggregations, proto.keyed, proto.keyedBucket); + return new InternalBucket(proto.key, docCount, aggregations); } }; @@ -252,7 +240,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed, keyedBucket); } if (keyed && keyedBucket) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java index 0e63e26e77a55..1d3614af08768 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -144,6 +144,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); while (ordsEnum.next()) { if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = newEmptyBucket(); } @@ -162,11 +163,10 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } } buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()); - } - return results; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildAggregation(name, requiredSize, Arrays.asList(topBucketsPerOrd.get(ordIdx)), metadata()) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index d56625ab28c51..6a32b41034503 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -152,7 +152,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalGeoGridBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index c972845468c2b..9e3c96da2e70b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -76,14 +77,12 @@ public int compareTo(InternalGeoGridBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString()); builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index ed687df6377dd..5ea8cd035e580 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -84,7 +84,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { double roundKey = Double.longBitsToDouble(bucketValue); double key = roundKey * interval + offset; - return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); + return new InternalHistogram.Bucket(key, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index cc2db63fa5ec5..1eb0226ad8c8c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -340,7 +340,7 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { - return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults); + return new InternalDateHistogram.Bucket(bucketValue, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); @@ -466,7 +466,6 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { new InternalDateHistogram.Bucket( rangeBucket.getFrom().toInstant().toEpochMilli(), rangeBucket.getDocCount(), - keyed, format, rangeBucket.getAggregations() ) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java index f385f7c34f6b7..5a104055d9aec 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java @@ -171,7 +171,6 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw (bucketValue, docCount, subAggregationResults) -> new InternalDateHistogram.Bucket( bucketValue, docCount, - keyed, formatter, subAggregationResults ), diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 564abff2a9f97..d2badbeec4622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -53,19 +53,17 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final long key; - private final transient boolean keyed; - public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. */ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readLong(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readLong(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -101,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -116,7 +113,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -124,15 +120,10 @@ public int compareKey(Bucket other) { return Long.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -237,7 +228,7 @@ public InternalDateHistogram(StreamInput in) throws IOException { } else { downsampledResultsOffset = false; } - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -301,7 +292,7 @@ public InternalDateHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -398,7 +389,7 @@ public void accept(long 
key) { reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new InternalDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -546,7 +537,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -603,7 +594,7 @@ public InternalAggregation createAggregation(List { final double key; - private final transient boolean keyed; - public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(double key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. */ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readDouble(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readDouble(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -96,8 +94,7 @@ public Object getKey() { return key; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -111,7 +108,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -119,15 +115,10 @@ public int compareKey(Bucket other) { return Double.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -220,7 +211,7 @@ public InternalHistogram(StreamInput in) throws IOException { } format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -265,7 +256,7 @@ public InternalHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -373,7 +364,7 @@ public void accept(double key) { 
reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -478,7 +469,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -508,7 +499,7 @@ public InternalAggregation createAggregation(List { - private final transient DocValueFormat format; private final BytesRef key; - private final boolean keyed; private final boolean isIpv6; private final int prefixLength; private final boolean appendPrefixLength; @@ -48,18 +46,14 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final InternalAggregations aggregations; public Bucket( - DocValueFormat format, BytesRef key, - boolean keyed, boolean isIpv6, int prefixLength, boolean appendPrefixLength, long docCount, InternalAggregations aggregations ) { - this.format = format; this.key = key; - this.keyed = keyed; this.isIpv6 = isIpv6; this.prefixLength = prefixLength; this.appendPrefixLength = appendPrefixLength; @@ -70,9 +64,7 @@ public Bucket( /** * Read from a stream. */ - public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { - this.format = format; - this.keyed = keyed; + public Bucket(StreamInput in) throws IOException { this.key = in.readBytesRef(); this.isIpv6 = in.readBoolean(); this.prefixLength = in.readVInt(); @@ -81,8 +73,7 @@ public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOExc this.aggregations = InternalAggregations.readFrom(in); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String key = DocValueFormat.IP.format(this.key); if (appendPrefixLength) { key = key + "/" + prefixLength; @@ -101,7 +92,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(IpPrefixAggregationBuilder.PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } private static BytesRef netmask(int prefixLength) { @@ -118,10 +108,6 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - public DocValueFormat getFormat() { - return format; - } - public BytesRef getKey() { return key; } @@ -162,14 +148,13 @@ public boolean equals(Object o) { && prefixLength == bucket.prefixLength && appendPrefixLength == bucket.appendPrefixLength && docCount == bucket.docCount - && Objects.equals(format, bucket.format) && Objects.equals(key, bucket.key) && Objects.equals(aggregations, bucket.aggregations); } @Override public int hashCode() { - return Objects.hash(format, key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); + return Objects.hash(key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); } @Override @@ -206,7 +191,7 @@ public InternalIpPrefix(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); minDocCount = in.readVLong(); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, format, keyed)); + buckets = 
in.readCollectionAsList(Bucket::new); } @Override @@ -298,7 +283,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalIpPrefix.Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -316,9 +301,7 @@ public InternalIpPrefix create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { return new Bucket( - format, prototype.key, - prototype.keyed, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, @@ -328,16 +311,7 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) } private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) { - return new Bucket( - format, - prototype.key, - prototype.keyed, - prototype.isIpv6, - prototype.prefixLength, - prototype.appendPrefixLength, - docCount, - aggregations - ); + return new Bucket(prototype.key, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, docCount, aggregations); } private Bucket reduceBucket(List buckets, AggregationReduceContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java index e8ba0393208a0..38d26bfa9ae28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java @@ -172,51 +172,48 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw } try (LongArray bucketOrdsToCollect = bigArrays().newLongArray(totalOrdsToCollect)) { - int b = 0; + int[] b = new int[] { 0 }; for (long i = 0; i < owningBucketOrds.size(); i++) { BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(i)); while (ordsEnum.next()) { - bucketOrdsToCollect.set(b++, ordsEnum.ord()); + bucketOrdsToCollect.set(b[0]++, ordsEnum.ord()); } } var subAggregationResults = buildSubAggsForBuckets(bucketOrdsToCollect); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - b = 0; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { + b[0] = 0; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { List buckets = new ArrayList<>(bucketsInOrd.get(ordIdx)); BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds.get(ordIdx)); while (ordsEnum.next()) { long ordinal = ordsEnum.ord(); - if (bucketOrdsToCollect.get(b) != ordinal) { + if (bucketOrdsToCollect.get(b[0]) != ordinal) { throw AggregationErrors.iterationOrderChangedWithoutMutating( bucketOrds.toString(), ordinal, - bucketOrdsToCollect.get(b) + bucketOrdsToCollect.get(b[0]) ); } BytesRef ipAddress = new BytesRef(); ordsEnum.readValue(ipAddress); long docCount = bucketDocCount(ordinal); + checkRealMemoryCBForInternalBucket(); buckets.add( new InternalIpPrefix.Bucket( - config.format(), BytesRef.deepCopyOf(ipAddress), - keyed, ipPrefix.isIpv6, ipPrefix.prefixLength, ipPrefix.appendPrefixLength, docCount, - subAggregationResults.apply(b++) + subAggregationResults.apply(b[0]++) ) ); // NOTE: the aggregator is expected to return sorted results CollectionUtil.introSort(buckets, 
BucketOrder.key(true).comparator()); } - results[ordIdx] = new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); - } - return results; + return new InternalIpPrefix(name, config.format(), keyed, minDocCount, buckets, metadata()); + }); } } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 9bde8d007c1b7..c10bb3543549e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -366,7 +366,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, docCount, subAggregationResults); + return new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, docCount, subAggregationResults); }, buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata()) ); @@ -378,7 +378,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (Range range : ranges) { - InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, 0, subAggs); + InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, 0, subAggs); buckets.add(bucket); } return new InternalBinaryRange(name, format, keyed, buckets, metadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 100bab7443a51..9571dfebc6069 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -39,23 +39,13 @@ public final class InternalBinaryRange extends InternalMultiBucketAggregation Bucket.createFromStream(stream, format, keyed)); + buckets = in.readCollectionAsList(stream -> Bucket.createFromStream(stream, format)); } @Override @@ -235,7 +222,7 @@ public InternalBinaryRange create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(format, keyed, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations); + return new Bucket(format, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations); } @Override @@ -251,7 +238,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return new Bucket(proto.format, proto.keyed, proto.key, proto.from, proto.to, docCount, aggregations); + return new Bucket(proto.format, proto.key, proto.from, proto.to, docCount, aggregations); } }; @@ -299,7 +286,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket range : buckets) { - 
range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7b2858806c325..7291a099dd7f7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -34,19 +34,11 @@ public Bucket( boolean keyed, DocValueFormat formatter ) { - super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter); + super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } - public Bucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat formatter - ) { - super(key, from, to, docCount, aggregations, keyed, formatter); + public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat formatter) { + super(key, from, to, docCount, aggregations, formatter); } @Override @@ -99,10 +91,9 @@ public Bucket createBucket( double to, long docCount, InternalAggregations aggregations, - boolean keyed, DocValueFormat formatter ) { - return new Bucket(key, from, to, docCount, aggregations, keyed, formatter); + return new Bucket(key, from, to, docCount, aggregations, formatter); } @Override @@ -113,7 +104,6 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) prototype.internalGetTo(), prototype.getDocCount(), aggregations, - prototype.getKeyed(), prototype.getFormat() ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java index d1c3761d45e82..9a33df4702c1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java @@ -23,8 +23,8 @@ public class InternalGeoDistance extends InternalRange ranges, DocValueFormat format, boolean keye } @SuppressWarnings("unchecked") - public B createBucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat format - ) { - return (B) new Bucket(key, from, to, docCount, aggregations, keyed, format); + public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat format) { + return (B) new Bucket(key, from, to, docCount, aggregations, format); } @SuppressWarnings("unchecked") @@ -232,7 +209,6 @@ public B createBucket(InternalAggregations aggregations, B prototype) { prototype.to, prototype.getDocCount(), aggregations, - prototype.keyed, prototype.format ); } @@ -285,7 +261,7 @@ public InternalRange(StreamInput in) throws IOException { } long docCount = in.readVLong(); InternalAggregations aggregations = InternalAggregations.readFrom(in); - ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, keyed, format)); + ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, format)); } this.ranges = ranges; } @@ -335,7 +311,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext 
reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.keyed, proto.format); + return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.format); } }; @@ -371,7 +347,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { b.to, samplingContext.scaleUp(b.getDocCount()), InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext), - b.keyed, b.format ) ) @@ -390,7 +365,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (B range : ranges) { - range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 0654a788a10a9..a4574e8081868 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -538,15 +538,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return rangeFactory.createBucket( - range.key, - range.originalFrom, - range.originalTo, - docCount, - subAggregationResults, - keyed, - format - ); + return rangeFactory.createBucket(range.key, range.originalFrom, range.originalTo, docCount, subAggregationResults, format); }, buckets -> rangeFactory.create(name, buckets, format, keyed, metadata()) ); @@ -564,7 +556,6 @@ public InternalAggregation buildEmptyAggregation() { range.originalTo, 0, subAggs, - keyed, format ); buckets.add(bucket); @@ -614,7 +605,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (RangeAggregator.Range range : ranges) { - buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, keyed, format)); + buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, format)); } return factory.create(name, buckets, format, keyed, metadata()); } @@ -886,7 +877,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { Range r = ranges[i]; InternalFilters.InternalBucket b = filters.getBuckets().get(i); buckets.add( - rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), keyed, format) + rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), format) ); } return rangeFactory.create(name(), buckets, format, keyed, filters.getMetadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 5c422a9dd4e32..6388eb3baaa84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -66,6 +66,8 @@ public abstract static class AbstractTermsBucket> buckets @@ -369,7 +372,7 @@ protected static XContentBuilder doXContentCommon( builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (AbstractTermsBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, showDocCountError); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index d04d7528ea938..db9da6ed67207 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -700,11 +700,10 @@ abstract class ResultStrategy< private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { if (valueCount == 0) { // no context in this reader - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildNoValuesResult(owningBucketOrds.get(ordIdx)); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildNoValuesResult(owningBucketOrds.get(ordIdx)) + ); } try ( LongArray otherDocCount = bigArrays().newLongArray(owningBucketOrds.size(), true); @@ -731,6 +730,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep otherDocCount.increment(finalOrdIdx, docCount); if (docCount >= bucketCountThresholds.getShardMinDocCount()) { if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = buildEmptyTemporaryBucket(); } updater.updateBucket(spare, globalOrd, bucketOrd, docCount); @@ -742,6 +742,7 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep // Get the top buckets topBucketsPreOrd.set(ordIdx, buildBuckets((int) ordered.size())); for (int i = (int) ordered.size() - 1; i >= 0; --i) { + checkRealMemoryCBForInternalBucket(); B bucket = convertTempBucketToRealBucket(ordered.pop(), lookupGlobalOrd); topBucketsPreOrd.get(ordIdx)[i] = bucket; otherDocCount.increment(ordIdx, -bucket.getDocCount()); @@ -751,11 +752,10 @@ public void accept(long globalOrd, long bucketOrd, long docCount) throws IOExcep buildSubAggs(topBucketsPreOrd); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(topBucketsPreOrd.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)); - } - return results; + return GlobalOrdinalsStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCount.get(ordIdx), topBucketsPreOrd.get(ordIdx)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java index 
f179b7d05f9a4..3f75a27306ab4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java @@ -134,7 +134,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th // There is a condition (presumably when only one shard has a bucket?) where reduce is not called // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer if (bucket.subsetDf >= minDocCount) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index 563321f56cb5f..5b9403840dfff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -145,6 +145,6 @@ public int hashCode() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index 6540cd2ee38da..64cebee880141 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; @@ -160,7 +158,7 @@ protected static XContentBuilder doXContentCommon(XContentBuilder builder, Param throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index eeb7305ac51fa..3f579947248bb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -157,8 +157,7 @@ public int hashCode() { return Objects.hash(getClass(), aggregations, score, format); } - @Override - public final XContentBuilder toXContent(XContentBuilder 
builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -166,7 +165,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) builder.field(BG_COUNT, supersetDf); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 8e25c164d5f33..b94b1f5ea40b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -136,7 +136,7 @@ public void setAggregations(InternalAggregations aggregations) { } @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -145,7 +145,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) } aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java index 877bd2cac4b05..45ea1245ec38d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongRareTermsAggregator.java @@ -142,6 +142,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(collectedBuckets.ord()); // if the key is below threshold, reinsert into the new ords if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); LongRareTerms.Bucket bucket = new LongRareTerms.Bucket(collectedBuckets.value(), docCount, null, format); bucket.bucketOrd = offset + bucketsInThisOwningBucketToCollect.add(collectedBuckets.value()); mergeMap.set(collectedBuckets.ord(), bucket.bucketOrd); @@ -173,21 +174,12 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw * Now build the results! 
*/ buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return LongRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { LongRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); Arrays.sort(buckets, ORDER.comparator()); - result[ordIdx] = new LongRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(buckets), - maxDocCount, - filters.get(ordIdx) - ); - } - return result; + return new LongRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java index c02ed5509e6ae..6ae47d5975479 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/MapStringTermsAggregator.java @@ -304,6 +304,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } updateBucket(spare, ordsEnum, docCount); @@ -320,11 +321,11 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro } buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + + return MapStringTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java index e10f0b8944027..ce89b95b76a05 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/NumericTermsAggregator.java @@ -185,6 +185,7 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = emptyBucketBuilder.get(); } updateBucket(spare, ordsEnum, docCount); @@ -203,11 +204,10 @@ private InternalAggregation[] buildAggregations(LongArray owningBucketOrds) thro buildSubAggs(topBucketsPerOrd); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(topBucketsPerOrd.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + return NumericTermsAggregator.this.buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(owningBucketOrds.get(ordIdx), otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java index 7200c33c71f70..8a2c9d52f4212 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringRareTermsAggregator.java @@ -145,6 +145,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw long docCount = bucketDocCount(collectedBuckets.ord()); // if the key is below threshold, reinsert into the new ords if (docCount <= maxDocCount) { + checkRealMemoryCBForInternalBucket(); StringRareTerms.Bucket bucket = new StringRareTerms.Bucket( BytesRef.deepCopyOf(scratch), docCount, @@ -181,21 +182,12 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw * Now build the results! */ buildSubAggsForAllBuckets(rarestPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { + + return StringRareTermsAggregator.this.buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> { StringRareTerms.Bucket[] buckets = rarestPerOrd.get(ordIdx); Arrays.sort(buckets, ORDER.comparator()); - result[ordIdx] = new StringRareTerms( - name, - ORDER, - metadata(), - format, - Arrays.asList(buckets), - maxDocCount, - filters.get(ordIdx) - ); - } - return result; + return new StringRareTerms(name, ORDER, metadata(), format, Arrays.asList(buckets), maxDocCount, filters.get(ordIdx)); + }); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 7755f1db6a3ee..8047d1f06990f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -111,7 +111,7 @@ public boolean canLeadReduction() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, 0L, 0, Collections.emptyList()); + return doXContentCommon(builder, params, false, 0L, 0, Collections.emptyList()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java index 0d767e356108a..cf65f1ff7c835 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricsAggregator.java @@ -38,10 +38,6 @@ protected MetricsAggregator(String name, AggregationContext context, Aggregator @Override public final InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < results.length; ordIdx++) { - results[ordIdx] = buildAggregation(owningBucketOrds.get(ordIdx)); - } - return results; + return buildAggregations(Math.toIntExact(owningBucketOrds.size()), ordIdx -> 
buildAggregation(owningBucketOrds.get(ordIdx))); } } diff --git a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java index 7bf172388eccd..0db3de9abdb7b 100644 --- a/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java +++ b/server/src/main/java/org/elasticsearch/transport/BytesTransportRequest.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.RefCounted; import java.io.IOException; @@ -22,7 +21,7 @@ * A specialized, bytes only request, that can potentially be optimized on the network * layer, specifically for the same large buffer send to several nodes. */ -public class BytesTransportRequest extends TransportRequest implements RefCounted { +public class BytesTransportRequest extends TransportRequest { final ReleasableBytesReference bytes; private final TransportVersion version; diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ba575cc642a81..6191922f13094 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8702003 8.15.4,8702003 8.16.0,8772001 +8.16.1,8772004 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index c54aea88613f5..f84d69af727ac 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8512000 8.15.4,8512000 8.16.0,8518000 +8.16.1,8518000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 7a31f0dcb4631..a7058e5d6cd8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -1057,6 +1057,8 @@ public static NodeStats createNodeStats() { randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), + randomLongBetween(0, maxStatValue), randomLongBetween(0, maxStatValue) ); } diff --git a/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java new file mode 100644 index 0000000000000..fe839c28aab88 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/datastreams/ReindexDataStreamResponseTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.action.datastreams; + +import org.elasticsearch.action.datastreams.ReindexDataStreamAction.ReindexDataStreamResponse; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; +import static org.hamcrest.Matchers.equalTo; + +public class ReindexDataStreamResponseTests extends AbstractWireSerializingTestCase { + @Override + protected Writeable.Reader instanceReader() { + return ReindexDataStreamResponse::new; + } + + @Override + protected ReindexDataStreamResponse createTestInstance() { + return new ReindexDataStreamResponse(randomAlphaOfLength(40)); + } + + @Override + protected ReindexDataStreamResponse mutateInstance(ReindexDataStreamResponse instance) { + return createTestInstance(); + } + + public void testToXContent() throws IOException { + ReindexDataStreamResponse response = createTestInstance(); + try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { + builder.humanReadable(true); + response.toXContent(builder, EMPTY_PARAMS); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), equalTo(Map.of("task", response.getTaskId()))); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 0bc5c69d8ad4b..dc1698e3459ec 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -134,7 +134,12 @@ public void setup() { ); fileSettingsService = spy( - new FileSettingsService(clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY)) + new FileSettingsService( + clusterService, + mock(ReservedClusterStateService.class), + newEnvironment(Settings.EMPTY), + new FileSettingsService.FileSettingsHealthIndicatorService() + ) ); } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 51796f404c283..d54ac9c66d9a5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -639,7 +639,6 @@ public void testMergeAggs() throws InterruptedException { 10000D, count, InternalAggregations.EMPTY, - false, DocValueFormat.RAW ); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); @@ -1498,15 +1497,7 @@ private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Inde private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) { Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); InternalDateRange.Factory factory = new InternalDateRange.Factory(); - InternalDateRange.Bucket bucket = factory.createBucket( - "bucket", - 0D, - 10000D, - 
count, - InternalAggregations.EMPTY, - false, - DocValueFormat.RAW - ); + InternalDateRange.Bucket bucket = factory.createBucket("bucket", 0D, 10000D, count, InternalAggregations.EMPTY, DocValueFormat.RAW); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java index 4518bd655346a..226f5dbf3b2ff 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinValidationServiceTests.java @@ -22,10 +22,14 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.component.Lifecycle; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistryTests; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -38,6 +42,7 @@ import org.elasticsearch.test.transport.MockTransport; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.CloseableConnection; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.TestTransportChannel; @@ -49,6 +54,7 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; @@ -155,6 +161,7 @@ public void doRun() { final var joinValidationService = new JoinValidationService( settings, transportService, + writableRegistry(), () -> usually() ? 
clusterState : null, clusterState::metadata, List.of() @@ -286,7 +293,14 @@ public void writeTo(StreamOutput out) {} ); // registers request handler - new JoinValidationService(Settings.EMPTY, joiningNodeTransportService, () -> clusterState, clusterState::metadata, List.of()); + new JoinValidationService( + Settings.EMPTY, + joiningNodeTransportService, + writableRegistry(), + () -> clusterState, + clusterState::metadata, + List.of() + ); joiningNodeTransportService.start(); joiningNodeTransportService.acceptIncomingRequests(); @@ -325,6 +339,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req final var joinValidationService = new JoinValidationService( Settings.EMPTY, masterTransportService, + writableRegistry(), () -> clusterState, clusterState::metadata, List.of() @@ -349,7 +364,7 @@ protected void onSendRequest(long requestId, String action, TransportRequest req } } - public void testJoinValidationRejectsMismatchedClusterUUID() { + public void testJoinValidationRejectsMismatchedClusterUUID() throws IOException { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); final var localNode = DiscoveryNodeUtils.create("node0"); @@ -371,7 +386,14 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { final var settings = Settings.builder().put(Environment.PATH_DATA_SETTING.getKey(), dataPath).build(); // registers request handler - new JoinValidationService(settings, transportService, () -> localClusterState, localClusterState::metadata, List.of()); + new JoinValidationService( + settings, + transportService, + writableRegistry(), + () -> localClusterState, + localClusterState::metadata, + List.of() + ); transportService.start(); transportService.acceptIncomingRequests(); @@ -384,7 +406,7 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(otherClusterState), + serializeClusterState(otherClusterState), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -401,6 +423,22 @@ public void testJoinValidationRejectsMismatchedClusterUUID() { ); } + private static BytesTransportRequest serializeClusterState(ClusterState clusterState) { + try ( + var bytesStream = new BytesStreamOutput(); + var compressedStream = new OutputStreamStreamOutput( + CompressorFactory.COMPRESSOR.threadLocalOutputStream(Streams.flushOnCloseStream(bytesStream)) + ) + ) { + compressedStream.setTransportVersion(TransportVersion.current()); + clusterState.writeTo(compressedStream); + compressedStream.flush(); + return new BytesTransportRequest(ReleasableBytesReference.wrap(bytesStream.bytes()), TransportVersion.current()); + } catch (Exception e) { + throw new AssertionError(e); + } + } + public void testJoinValidationRunsJoinValidators() { final var deterministicTaskQueue = new DeterministicTaskQueue(); final var mockTransport = new MockTransport(); @@ -420,11 +458,12 @@ public void testJoinValidationRunsJoinValidators() { new JoinValidationService( Settings.EMPTY, transportService, + writableRegistry(), () -> localClusterState, localClusterState::metadata, List.of((node, state) -> { assertSame(node, localNode); - assertSame(state, stateForValidation); + assertEquals(state.stateUUID(), stateForValidation.stateUUID()); throw new IllegalStateException("simulated validation failure"); }) 
); // registers request handler @@ -435,7 +474,7 @@ public void testJoinValidationRunsJoinValidators() { transportService.sendRequest( localNode, JoinValidationService.JOIN_VALIDATE_ACTION_NAME, - new ValidateJoinRequest(stateForValidation), + serializeClusterState(stateForValidation), new ActionListenerResponseHandler<>(future, in -> TransportResponse.Empty.INSTANCE, TransportResponseHandler.TRANSPORT_WORKER) ); deterministicTaskQueue.runAllTasks(); @@ -467,9 +506,16 @@ protected void onSendRequest(long requestId, String action, TransportRequest req null, Collections.emptySet() ); - final var joinValidationService = new JoinValidationService(Settings.EMPTY, masterTransportService, () -> null, () -> { - throw new AssertionError("should not be called"); - }, List.of()); + final var joinValidationService = new JoinValidationService( + Settings.EMPTY, + masterTransportService, + writableRegistry(), + () -> null, + () -> { + throw new AssertionError("should not be called"); + }, + List.of() + ); masterTransportService.start(); masterTransportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..57c360dc6a92a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -10,163 +10,90 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; import java.util.Locale; +import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class DateMathExpressionResolverTests extends ESTestCase { - private final Context context = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictExpand(), - SystemIndexAccessLevel.NONE - ); + private final long now = randomMillisUpToYear9999(); + private final LongSupplier getTime = () -> now; - private static ZonedDateTime dateFromMillis(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); - } + public void testNoDateMathExpression() { + String expression = randomAlphaOfLength(10); + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); - private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { - DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); - return dateFormatter.format(zonedDateTime); + expression = "*"; + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), 
equalTo(expression)); } - public void testNormal() throws Exception { - int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); - for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); - } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(indexExpressions.size())); - for (int i = 0; i < indexExpressions.size(); i++) { - assertThat(result.get(i), equalTo(indexExpressions.get(i))); - } - } + public void testExpression() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testExpression() throws Exception { - List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + result = DateMathExpressionResolver.resolveExpression("<.watch_history-{now}>", getTime); + assertThat(result, equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = Arrays.asList( - "<-before-inner-{now}>", - "-", - "", - "<-after-inner-{now}>", - "-" - ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-after-outer-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))) - ) - ); - Context noWildcardExpandContext = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictSingleIndexNoExpandForbidClosed(), - SystemIndexAccessLevel.NONE - ); - result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-"), - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-") - ) - ); - } + String result = DateMathExpressionResolver.resolveExpression("<-before-inner-{now}>", getTime); + assertThat(result, 
equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "*")); + + result = DateMathExpressionResolver.resolveExpression("<-after-inner-{now}>", getTime); + assertThat(result, equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); - assertThat(result.size(), equalTo(0)); } - public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + public void testExpression_Static() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-test>", getTime); + assertThat(result, equalTo(".marvel-test")); } - public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); - assertThat(result.size(), equalTo(1)); + public void testExpression_MultiParts() { + String result = DateMathExpressionResolver.resolveExpression("<.text1-{now/d}-text2-{now/M}>", getTime); assertThat( - result.get(0), + result, equalTo( ".text1-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "-text2-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1)) + + formatDate("uuuu.MM.dd", dateFromMillis(now).withDayOfMonth(1)) ) ); } - public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); - assertThat(results.size(), equalTo(1)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - } - - public void testExpression_EscapeStatic() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + public void testExpression_CustomFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{yyyy.MM.dd}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + public void testExpression_EscapeStatic() { + String result = DateMathExpressionResolver.resolveExpression("<.mar\\{v\\}el-{now/d}>", getTime); + assertThat(result, equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") - ); - assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - 
assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + public void testExpression_EscapeDateFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{'\\{year\\}'yyyy}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(now)))); } - public void testExpression_CustomTimeZoneInIndexName() throws Exception { + public void testExpression_CustomTimeZoneInIndexName() { ZoneId timeZone; int hoursOffset; int minutesOffset = 0; @@ -194,57 +121,57 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { // rounding to today 00:00 now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0); } - Context context = new Context( - this.context.getState(), - this.context.getOptions(), - now.toInstant().toEpochMilli(), - SystemIndexAccessLevel.NONE, - name -> false, - name -> false - ); - List results = DateMathExpressionResolver.resolve( - context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + + String result = DateMathExpressionResolver.resolveExpression( + "<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>", + () -> now.toInstant().toEpochMilli() ); - assertThat(results.size(), equalTo(1)); - logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, result); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } - public void testExpressionInvalidUnescaped() throws Exception { + public void testExpressionInvalidUnescaped() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolveExpression("<.mar}vel-{now/d}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); } - public void testExpressionInvalidDateMathFormat() throws Exception { + public void testExpressionInvalidDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - public void testExpressionInvalidEmptyDateMathFormat() throws Exception { + public void testExpressionInvalidEmptyDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); } - public void testExpressionInvalidOpenEnded() throws Exception { + public void testExpressionInvalidOpenEnded() 
{ Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + static String formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 99470918ce063..30895767c33c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -47,6 +48,7 @@ import java.time.LocalDate; import java.time.ZoneOffset; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,6 +60,8 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFailureStore; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.dateFromMillis; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.formatDate; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; @@ -885,10 +889,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[] {})), - equalTo(newHashSet("kuku", "testXXX")) - ); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context)), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesNoIndicesErrorMessage() { @@ -1408,52 +1409,56 @@ public void testConcreteIndicesWildcardNoMatch() { } } - public void testIsAllIndicesNull() throws Exception { + public void testIsAllIndicesNull() { assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true)); } - public void testIsAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Collections.emptyList()), equalTo(true)); + public void testIsAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true)); + } + + public void testIsAllIndicesExplicitAll() { + 
assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true)); } - public void testIsAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all")), equalTo(true)); + public void testIsAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false)); } - public void testIsAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsNoneIndices() { + assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true)); } - public void testIsAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("*")), equalTo(false)); + public void testIsAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false)); } - public void testIsExplicitAllIndicesNull() throws Exception { + public void testIsExplicitAllIndicesNull() { assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false)); } - public void testIsExplicitAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Collections.emptyList()), equalTo(false)); + public void testIsExplicitAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false)); } - public void testIsExplicitAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all")), equalTo(true)); + public void testIsExplicitAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true)); } - public void testIsExplicitAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsExplicitAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false)); } - public void testIsExplicitAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsExplicitAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsExplicitAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("*")), equalTo(false)); + public void testIsExplicitAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false)); } public void testIndexOptionsFailClosedIndicesAndAliases() { @@ -1580,16 +1585,13 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - 
assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("test-0", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") - ); - assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of("test-0", "test-1", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals(Set.of("test-1", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1598,16 +1600,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set resolvedExpressions = Set.of("alias-0", "alias-1"); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "test-1", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1742,7 +1744,7 @@ public void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-alias")); + Set resolvedExpressions = Set.of("test-0", "test-alias"); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1771,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1853,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1891,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); 
Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1927,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1968,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -2328,40 +2330,40 @@ public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); } public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); } public void testWildcardSystemIndexResolutionSingleMatchAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); } public void testSingleSystemIndexResolutionAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta")); } public void testFullWildcardSystemIndicesAreHidden() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(randomFrom("*", "_all")); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining("some-other-index")); } public void testFullWildcardSystemIndexResolutionDeprecated() { @@ -2370,8 +2372,8 @@ public void testFullWildcardSystemIndexResolutionDeprecated() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", 
".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); assertWarnings( true, new DeprecationWarning( @@ -2388,8 +2390,8 @@ public void testSingleSystemIndexResolutionDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".ml-meta")); assertWarnings( true, new DeprecationWarning( @@ -2405,8 +2407,8 @@ public void testWildcardSystemIndexResolutionSingleMatchDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); assertWarnings( true, new DeprecationWarning( @@ -2423,8 +2425,8 @@ public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); assertWarnings( true, new DeprecationWarning( @@ -2479,8 +2481,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2496,8 +2498,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2515,8 +2517,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2526,8 +2528,8 @@ public void testExternalSystemIndexAccess() { 
threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2538,8 +2540,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2549,8 +2551,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -3073,7 +3075,6 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); - ; } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; @@ -3239,6 +3240,37 @@ public void testDataStreamsNames() { assertThat(names, empty()); } + public void testDateMathMixedArray() { + long now = System.currentTimeMillis(); + String dataMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)); + String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1)); + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + ClusterState.builder(new ClusterName("_name")) + .metadata( + Metadata.builder() + .put(indexBuilder("name1")) + .put(indexBuilder("name2")) + .put(indexBuilder(dataMathIndex1)) + .put(indexBuilder(dateMathIndex2)) + ) + .build(), + IndicesOptions.strictExpand(), + now, + SystemIndexAccessLevel.NONE, + Predicates.never(), + Predicates.never() + ); + Collection result = IndexNameExpressionResolver.resolveExpressionsToResources( + context, + "name1", + "<.marvel-{now/d}>", + "name2", + "<.logstash-{now/M{uuuu.MM}}>" + ); + assertThat(result.size(), equalTo(4)); + assertThat(result, contains("name1", dataMathIndex1, "name2", dateMathIndex2)); + } + public void testMathExpressionSupport() { Instant instant = LocalDate.of(2021, 01, 11).atStartOfDay().toInstant(ZoneOffset.UTC); String resolved = IndexNameExpressionResolver.resolveDateMathExpression("", instant.toEpochMilli()); @@ -3418,10 +3450,6 @@ private ClusterState systemIndexTestClusterState() { return ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); } - private List resolveConcreteIndexNameList(ClusterState state, SearchRequest request) { - return 
Arrays.stream(indexNameExpressionResolver.concreteIndices(state, request)).map(Index::getName).toList(); - } - private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) { return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..6a26e7948784c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,23 +13,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class WildcardExpressionResolverTests extends ESTestCase { @@ -50,107 +47,31 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*")), + equalTo(newHashSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") - ) - ), - equalTo(newHashSet("testXYY")) - ); - if (indicesOptions == IndicesOptions.lenientExpandOpen()) { - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - equalTo(newHashSet("testXXX", "-testXXX")) - ); - } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(context, "testXXX", "-testXXX") - ); - assertEquals("-testXXX", infe.getIndex().getName()); - } - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) - ); - } - - public void testConvertWildcardsTests() { - Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").putAlias(AliasMetadata.builder("alias1")).putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testXYY").putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testYYY").putAlias(AliasMetadata.builder("alias3"))) - .put(indexBuilder("kuku")); - ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - - IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.lenientExpandOpen(), - SystemIndexAccessLevel.NONE - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", "-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); } public void testConvertWildcardsOpenClosedIndicesTests() { Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXXY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXYY").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("testYYY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testYYX").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("kuku").state(IndexMetadata.State.OPEN)); + .put(indexBuilder("testXXX").state(State.OPEN)) + .put(indexBuilder("testXXY").state(State.OPEN)) + .put(indexBuilder("testXYY").state(State.CLOSE)) 
+ .put(indexBuilder("testYYY").state(State.OPEN)) + .put(indexBuilder("testYYX").state(State.CLOSE)) + .put(indexBuilder("kuku").state(State.OPEN)); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -159,7 +80,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -168,7 +89,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -177,26 +98,9 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY")) ); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(true, true, false, false), - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(context, "testX*").size(), equalTo(0)); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(false, true, false, false), - SystemIndexAccessLevel.NONE - ); - IndexNameExpressionResolver.Context finalContext = context; - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(finalContext, "testX*") - ); - assertThat(infe.getIndex().getName(), is("testX*")); } // issue #13334 @@ -217,28 +121,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y")), equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*")), equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*")), equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X")).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X")).size(), equalTo(0) ); } @@ -259,26 +162,6 @@ public void testAll() { newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - IndicesOptions noExpandOptions = IndicesOptions.fromOptions( - randomBoolean(), - true, - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context( - state, - noExpandOptions, - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } public void testAllAliases() { @@ -506,112 +389,47 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertEquals(0, indices.size()); } { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesStrictContext, - Collections.singletonList("foo_a*") - ) + Set indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + skipAliasesStrictContext, + "foo_a*" ); - assertEquals("foo_a*", infe.getIndex().getName()); + assertThat(indices, empty()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesLenientContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(skipAliasesStrictContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } - IndicesOptions noExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions(true, false, false, false, true, false, true, false); - IndexNameExpressionResolver.Context noExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - noExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - noExpandNoAliasesContext, - List.of("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( - false, - true, - false, - false, - true, - false, - true, - false - ); - IndexNameExpressionResolver.Context strictNoExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - strictNoExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(strictNoExpandNoAliasesContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } } public void testResolveDataStreams() { @@ -654,17 +472,14 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data steram - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("bar_*") - ); + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "bar_*"); assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } @@ -691,9 +506,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - 
Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -707,9 +522,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -748,9 +563,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -764,9 +579,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -808,24 +623,17 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + indicesAndAliasesContext, + "*" + ); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*"); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") - ); - assertThat( - iae.getMessage(), - containsString("The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead") - ); } private static IndexMetadata.Builder indexBuilder(String index, boolean hidden) { @@ -838,10 +646,6 @@ private static IndexMetadata.Builder indexBuilder(String index) { } private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { - IndexNotFoundException infe = 
expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) - ); - assertEquals(wildcardExpression, infe.getIndex().getName()); + assertThat(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, wildcardExpression), empty()); } } diff --git a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index ad298e7aa8307..3fd8535cd5c27 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -107,4 +107,10 @@ public void testGetDoubleLE() { Exception e = expectThrows(ArrayIndexOutOfBoundsException.class, () -> ref.getDoubleLE(9)); assertThat(e.getMessage(), equalTo("Index 9 out of bounds for length 9")); } + + public void testCopyBytes() { + var data = randomByteArrayOfLength(between(1024, 1024 * 1024 * 50)); + var copy = BytesReference.copyBytes(new BytesArray(data)); + assertArrayEquals(data, BytesReference.toBytes(copy)); + } } diff --git a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java index 8cd61453a3391..27dc0be673abb 100644 --- a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java +++ b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java @@ -85,11 +85,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java index b4130120372a1..8da7ada91856d 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexingPressureTests.java @@ -37,6 +37,31 @@ public void testMemoryLimitSettingsFallbackToOldSingleLimitSetting() { assertThat(IndexingPressure.MAX_REPLICA_BYTES.get(settings), Matchers.equalTo(ByteSizeValue.ofKb(30))); } + public void testHighAndLowWatermarkSplits() { + IndexingPressure indexingPressure = new IndexingPressure(settings); + + try ( + Releasable ignored1 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(6).getBytes(), false); + Releasable ignored2 = indexingPressure.markCoordinatingOperationStarted(10, ByteSizeValue.ofKb(2).getBytes(), false) + ) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 1000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 0L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(1025, 10000))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + + try (Releasable ignored3 = indexingPressure.markPrimaryOperationStarted(10, ByteSizeValue.ofKb(1).getBytes(), false)) { + assertFalse(indexingPressure.shouldSplitBulk(randomIntBetween(1, 127))); + assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 0L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + assertTrue(indexingPressure.shouldSplitBulk(randomIntBetween(129, 1000))); + 
assertEquals(indexingPressure.stats().getHighWaterMarkSplits(), 1L); + assertEquals(indexingPressure.stats().getLowWaterMarkSplits(), 1L); + } + } + } + public void testHighAndLowWatermarkSettings() { IndexingPressure indexingPressure = new IndexingPressure(settings); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java new file mode 100644 index 0000000000000..059cb15551acb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsLoaderTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class PluginsLoaderTests extends ESTestCase { + + public void testToModuleName() { + assertThat(PluginsLoader.toModuleName("module.name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name1"), equalTo("module.name1")); + assertThat(PluginsLoader.toModuleName("1module-name"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module!@#name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("!module-name!"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("module_name"), equalTo("module_name")); + assertThat(PluginsLoader.toModuleName("-module-name-"), equalTo("module.name")); + assertThat(PluginsLoader.toModuleName("_module_name"), equalTo("_module_name")); + assertThat(PluginsLoader.toModuleName("_"), equalTo("_")); + } +} diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index f927a12b50da3..b84f1d2c7635c 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -66,12 +66,12 @@ public class PluginsServiceTests extends ESTestCase { public static class FilterablePlugin extends Plugin implements ScriptPlugin {} static PluginsService newPluginsService(Settings settings) { - return new PluginsService(settings, null, null, TestEnvironment.newEnvironment(settings).pluginsFile()) { + return new PluginsService(settings, null, new PluginsLoader(null, TestEnvironment.newEnvironment(settings).pluginsFile()) { @Override protected void addServerExportsService(Map> qualifiedExports) { // tests don't run modular } - }; + }); } static PluginsService newMockPluginsService(List> classpathPlugins) { @@ -875,20 +875,6 @@ public void testCanCreateAClassLoader() { assertEquals(this.getClass().getClassLoader(), loader.getParent()); } - public void testToModuleName() { - assertThat(PluginsService.toModuleName("module.name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name"), equalTo("module.name")); - 
assertThat(PluginsService.toModuleName("module-name1"), equalTo("module.name1")); - assertThat(PluginsService.toModuleName("1module-name"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module!@#name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("!module-name!"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("module_name"), equalTo("module_name")); - assertThat(PluginsService.toModuleName("-module-name-"), equalTo("module.name")); - assertThat(PluginsService.toModuleName("_module_name"), equalTo("_module_name")); - assertThat(PluginsService.toModuleName("_"), equalTo("_")); - } - static final class Loader extends ClassLoader { Loader(ClassLoader parent) { super(parent); @@ -896,16 +882,17 @@ static final class Loader extends ClassLoader { } // Closes the URLClassLoaders and UberModuleClassloaders of plugins loaded by the given plugin service. + // We can use the direct ClassLoader from the plugin because tests do not use any parent SPI ClassLoaders. static void closePluginLoaders(PluginsService pluginService) { for (var lp : pluginService.plugins()) { - if (lp.loader() instanceof URLClassLoader urlClassLoader) { + if (lp.instance().getClass().getClassLoader() instanceof URLClassLoader urlClassLoader) { try { PrivilegedOperations.closeURLClassLoader(urlClassLoader); } catch (IOException unexpected) { throw new UncheckedIOException(unexpected); } } - if (lp.loader() instanceof UberModuleClassLoader loader) { + if (lp.instance().getClass().getClassLoader() instanceof UberModuleClassLoader loader) { try { PrivilegedOperations.closeURLClassLoader(loader.getInternalLoader()); } catch (Exception e) { diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..03d1adff42c4e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.reservedstate.service;
+
+import org.elasticsearch.health.HealthIndicatorDetails;
+import org.elasticsearch.health.HealthIndicatorResult;
+import org.elasticsearch.health.SimpleHealthIndicatorDetails;
+import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService;
+import org.elasticsearch.test.ESTestCase;
+import org.junit.Before;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.health.HealthStatus.GREEN;
+import static org.elasticsearch.health.HealthStatus.YELLOW;
+import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM;
+import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM;
+import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.STALE_SETTINGS_IMPACT;
+import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.SUCCESS_SYMPTOM;
+
+/**
+ * Here, we test {@link FileSettingsHealthIndicatorService} in isolation;
+ * we do not test that {@link FileSettingsService} uses it correctly.
+ */
+public class FileSettingsHealthIndicatorServiceTests extends ESTestCase {
+
+    FileSettingsHealthIndicatorService healthIndicatorService;
+
+    @Before
+    public void initialize() {
+        healthIndicatorService = new FileSettingsHealthIndicatorService();
+    }
+
+    public void testInitiallyGreen() {
+        assertEquals(
+            new HealthIndicatorResult("file_settings", GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()),
+            healthIndicatorService.calculate(false, null)
+        );
+    }
+
+    public void testGreenYellowYellowGreen() {
+        healthIndicatorService.changeOccurred();
+        // This is a strange case: a change occurred, but neither success nor failure has been reported yet.
+        // While the change is still in progress, we don't change the status.
+ assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie 1"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", "whoopsie 1")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie #2"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 2L, "most_recent_failure", "whoopsie #2")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.successOccurred(); + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 0db29588c4298..ae60a21b6fc22 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,6 +34,7 @@ import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -78,6 +80,8 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; public class FileSettingsServiceTests extends ESTestCase { private static final Logger logger = LogManager.getLogger(FileSettingsServiceTests.class); @@ -86,6 +90,7 @@ public class FileSettingsServiceTests extends ESTestCase { private ReservedClusterStateService controller; private ThreadPool threadpool; private FileSettingsService fileSettingsService; + private FileSettingsHealthIndicatorService healthIndicatorService; @Before public void setUp() throws Exception { @@ -131,7 +136,8 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - fileSettingsService = spy(new FileSettingsService(clusterService, controller, env)); + healthIndicatorService = mock(FileSettingsHealthIndicatorService.class); + fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } @After @@ 
-162,6 +168,7 @@ public void testStartStop() { assertTrue(fileSettingsService.watching()); fileSettingsService.stop(); assertFalse(fileSettingsService.watching()); + verifyNoInteractions(healthIndicatorService); } public void testOperatorDirName() { @@ -208,6 +215,10 @@ public void testInitialFileError() throws Exception { verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalStateException.class.getName()))); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -232,6 +243,10 @@ public void testInitialFileWorks() throws Exception { verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -267,6 +282,10 @@ public void testProcessFileChanges() throws Exception { verify(fileSettingsService, times(1)).processFileChanges(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); + + verify(healthIndicatorService, times(2)).changeOccurred(); + verify(healthIndicatorService, times(2)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -321,6 +340,11 @@ public void testInvalidJSON() throws Exception { // Note: the name "processFileOnServiceStart" is a bit misleading because it is not // referring to fileSettingsService.start(). Rather, it is referring to the initialization // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
+
+        verify(healthIndicatorService, times(2)).changeOccurred();
+        verify(healthIndicatorService, times(1)).successOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalArgumentException.class.getName())));
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     private static void awaitOrBust(CyclicBarrier barrier) {
@@ -373,6 +397,12 @@ public void testStopWorksInMiddleOfProcessing() throws Exception {
         fileSettingsService.close();
         // let the deadlocked thread end, so we can cleanly exit the test
         deadThreadLatch.countDown();
+
+        verify(healthIndicatorService, times(1)).changeOccurred();
+        verify(healthIndicatorService, times(1)).failureOccurred(
+            argThat(s -> s.startsWith(FailedToCommitClusterStateException.class.getName()))
+        );
+        verifyNoMoreInteractions(healthIndicatorService);
     }
 
     public void testHandleSnapshotRestoreClearsMetadata() throws Exception {
diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
index b7d38f6f299c7..2fdb3daa26da4 100644
--- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java
@@ -906,11 +906,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar
         @Override
         public void release() {}
 
-        @Override
-        public HttpRequest releaseAndCopy() {
-            return this;
-        }
-
         @Override
         public Exception getInboundException() {
             return null;
diff --git a/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java
new file mode 100644
index 0000000000000..c4a1699181efc
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/script/MultiVectorScoreScriptUtilsTests.java
@@ -0,0 +1,342 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */ + +package org.elasticsearch.script; + +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorScriptDocValuesTests; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimDotProduct; +import org.elasticsearch.script.MultiVectorScoreScriptUtils.MaxSimInvHamming; +import org.elasticsearch.script.field.vectors.BitMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.ByteMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.FloatMultiDenseVectorDocValuesField; +import org.elasticsearch.script.field.vectors.MultiDenseVectorDocValuesField; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HexFormat; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class MultiVectorScoreScriptUtilsTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testFloatMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][][] docVectors = new float[][][] { + { { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }, { 100.0f, 200.0f, -50.0f, 10.0f, -150.0f } } }; + float[][] docMagnitudes = new float[][] { { 0.0f, 0.0f } }; + for (int i = 0; i < docVectors.length; i++) { + for (int j = 0; j < docVectors[i].length; j++) { + docMagnitudes[i][j] = (float) Math.sqrt(VectorUtil.dotProduct(docVectors[i][j], docVectors[i][j])); + } + } + + List> queryVector = List.of(Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f)); + List> invalidQueryVector = List.of(Arrays.asList(0.5, 111.3)); + + List fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ), + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(docVectors, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(docMagnitudes), + "test", + ElementType.FLOAT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + // Test max similarity dot product + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + float maxSimDotProductExpected = 65425.625f; // Adjust this value based on expected max similarity + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + maxSimDotProductExpected, + maxSimDotProduct.maxSimDotProduct(), + 0.001 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MultiVectorScoreScriptUtils.MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = 
expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat(e.getMessage(), containsString("hamming distance is only supported for byte or bit vectors")); + + // Check scripting infrastructure integration + assertEquals(65425.6249, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct() + ); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testByteMultiVectorClassBindings() throws IOException { + String fieldName = "vector"; + int dims = 5; + float[][] docVector = new float[][] { { 1, 127, -128, 5, -10 } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List> queryVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + List> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 1, 125, -12, 2, 4 })); + + List fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "test", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimInvHamming(scoreScript, invalidQueryVector, fieldName)); + assertThat( + e.getMessage(), + containsString("query vector has a different number of dimensions [2] than the document vectors [5]") + ); + + // Check scripting infrastructure integration + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, queryVector, fieldName).maxSimDotProduct(), 0.001); + assertEquals(17382.0, new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName).maxSimDotProduct(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, queryVector, fieldName).maxSimInvHamming(), 0.001); + assertEquals(0.675, new MaxSimInvHamming(scoreScript, hexidecimalString, fieldName).maxSimInvHamming(), 0.001); + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + when(scoreScript._getDocId()).thenReturn(1); + e = expectThrows(IllegalArgumentException.class, maxSimDotProduct::maxSimDotProduct); + assertEquals("A document doesn't have a value for a multi-vector field!", e.getMessage()); + } + } + + public void testBitMultiVectorClassBindingsDotProduct() throws IOException { + String fieldName = "vector"; + int dims = 8; + float[][] docVector = new float[][] { { 124 } }; + // 124 in binary is b01111100 + List> queryVector = List.of( + Arrays.asList((byte) 1, (byte) 125, (byte) -12, 
(byte) 2, (byte) 4, (byte) 1, (byte) 125, (byte) -12) + ); + List> floatQueryVector = List.of(Arrays.asList(1.4f, -1.4f, 0.42f, 0.0f, 1f, -1f, -0.42f, 1.2f)); + List> invalidQueryVector = List.of(Arrays.asList((byte) 1, (byte) 1)); + List hexidecimalString = List.of(HexFormat.of().formatHex(new byte[] { 124 })); + + List fields = List.of( + new BitMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BIT), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 5 } }), + "test", + ElementType.BIT, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + MaxSimDotProduct function = new MaxSimDotProduct(scoreScript, queryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + -12 + 2 + 4 + 1 + 125, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, floatQueryVector, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + 0.42f + 0f + 1f - 1f - 0.42f, + function.maxSimDotProduct(), + 0.001 + ); + + function = new MaxSimDotProduct(scoreScript, hexidecimalString, fieldName); + assertEquals( + "maxSimDotProduct result is not equal to the expected value!", + Integer.bitCount(124), + function.maxSimDotProduct(), + 0.0 + ); + + // Check each function rejects query vectors with the wrong dimension + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new MaxSimDotProduct(scoreScript, invalidQueryVector, fieldName) + ); + assertThat( + e.getMessage(), + containsString( + "query vector contains inner vectors which have incorrect number of dimensions. " + + "Must be [1] for bitwise operations, or [8] for byte wise operations: provided [2]." 
+ ) + ); + } + } + + public void testByteVsFloatSimilarity() throws IOException { + int dims = 5; + float[][] docVector = new float[][] { { 1f, 127f, -128f, 5f, -10f } }; + float[][] magnitudes = new float[][] { { 0 } }; + for (int i = 0; i < docVector.length; i++) { + magnitudes[i][0] = (float) Math.sqrt(VectorUtil.dotProduct(docVector[i], docVector[i])); + } + List> listFloatVector = List.of(Arrays.asList(1f, 125f, -12f, 2f, 4f)); + List> listByteVector = List.of(Arrays.asList((byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4)); + float[][] floatVector = new float[][] { { 1f, 125f, -12f, 2f, 4f } }; + byte[][] byteVector = new byte[][] { { (byte) 1, (byte) 125, (byte) -12, (byte) 2, (byte) 4 } }; + + List fields = List.of( + new FloatMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.FLOAT), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field1", + ElementType.FLOAT, + dims + ), + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { docVector }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(magnitudes), + "field3", + ElementType.BYTE, + dims + ) + ); + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field("vector")).thenAnswer(mock -> field); + + int dotProductExpected = 17382; + MaxSimDotProduct maxSimDotProduct = new MaxSimDotProduct(scoreScript, listFloatVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + maxSimDotProduct = new MaxSimDotProduct(scoreScript, listByteVector, "vector"); + assertEquals(field.getName(), dotProductExpected, maxSimDotProduct.maxSimDotProduct(), 0.001); + switch (field.getElementType()) { + case BYTE -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(byteVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(floatVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(byte[][] queryVector)] instead")); + } + case FLOAT -> { + assertEquals(field.getName(), dotProductExpected, field.get().maxSimDotProduct(floatVector), 0.001); + UnsupportedOperationException e = expectThrows( + UnsupportedOperationException.class, + () -> field.get().maxSimDotProduct(byteVector) + ); + assertThat(e.getMessage(), containsString("use [float maxSimDotProduct(float[][] queryVector)] instead")); + } + } + } + } + + public void testByteBoundaries() throws IOException { + String fieldName = "vector"; + int dims = 1; + float[] docVector = new float[] { 0 }; + List> greaterThanVector = List.of(List.of(128)); + List> lessThanVector = List.of(List.of(-129)); + List> decimalVector = List.of(List.of(0.5)); + + List fields = List.of( + new ByteMultiDenseVectorDocValuesField( + MultiDenseVectorScriptDocValuesTests.wrap(new float[][][] { { docVector } }, ElementType.BYTE), + MultiDenseVectorScriptDocValuesTests.wrap(new float[][] { { 1 } }), + "test", + ElementType.BYTE, + dims + ) + ); + + for (MultiDenseVectorDocValuesField field : fields) { + field.setNextDocId(0); + + ScoreScript scoreScript = mock(ScoreScript.class); + when(scoreScript.field(fieldName)).thenAnswer(mock -> field); + + IllegalArgumentException e; + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, greaterThanVector, 
fieldName)); + assertEquals( + "element_type [byte] vectors only support integers between [-128, 127] but found [128.0] at dim [0]; " + + "Preview of invalid vector: [128.0]", + e.getMessage() + ); + + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, lessThanVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support integers between [-128, 127] but found [-129.0] at dim [0]; " + + "Preview of invalid vector: [-129.0]" + ); + e = expectThrows(IllegalArgumentException.class, () -> new MaxSimDotProduct(scoreScript, decimalVector, fieldName)); + assertEquals( + e.getMessage(), + "element_type [byte] vectors only support non-decimal values but found decimal value [0.5] at dim [0]; " + + "Preview of invalid vector: [0.5]" + ); + } + } + + public void testDimMismatch() throws IOException { + + } +} diff --git a/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java new file mode 100644 index 0000000000000..12f4b931b4d0a --- /dev/null +++ b/server/src/test/java/org/elasticsearch/script/field/vectors/MultiDenseVectorTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.script.field.vectors; + +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.VectorUtil; +import org.elasticsearch.index.mapper.vectors.MultiDenseVectorFieldMapper; +import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.function.IntFunction; + +public class MultiDenseVectorTests extends ESTestCase { + + @BeforeClass + public static void setup() { + assumeTrue("Requires multi-dense vector support", MultiDenseVectorFieldMapper.FEATURE_FLAG.isEnabled()); + } + + public void testByteUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + byte[][] docVector = new byte[count][dims]; + float[][] queryVector = new float[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(docVector[i]); + for (int j = 0; j < dims; j++) { + queryVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newByteVector(docVector); + UnsupportedOperationException e; + + e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); + assertEquals(e.getMessage(), "use [float maxSimDotProduct(byte[][] queryVector)] instead"); + } + + public void testFloatUnsupported() { + int count = randomIntBetween(1, 16); + int dims = randomIntBetween(1, 16); + float[][] docVector = new float[count][dims]; + byte[][] queryVector = new byte[count][dims]; + for (int i = 0; i < docVector.length; i++) { + random().nextBytes(queryVector[i]); + for (int j = 0; j < dims; j++) { + docVector[i][j] = randomFloat(); + } + } + + MultiDenseVector knn = newFloatVector(docVector); + + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, () -> knn.maxSimDotProduct(queryVector)); 
+ assertEquals(e.getMessage(), "use [float maxSimDotProduct(float[][] queryVector)] instead"); + } + + static MultiDenseVector newFloatVector(float[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new FloatMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static MultiDenseVector newByteVector(byte[][] vector) { + BytesRef magnitudes = magnitudes(vector.length, i -> (float) Math.sqrt(VectorUtil.dotProduct(vector[i], vector[i]))); + return new ByteMultiDenseVector(VectorIterator.from(vector), magnitudes, vector.length, vector[0].length); + } + + static BytesRef magnitudes(int count, IntFunction magnitude) { + ByteBuffer magnitudeBuffer = ByteBuffer.allocate(count * Float.BYTES).order(ByteOrder.LITTLE_ENDIAN); + for (int i = 0; i < count; i++) { + magnitudeBuffer.putFloat(magnitude.apply(i)); + } + return new BytesRef(magnitudeBuffer.array()); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index bd423999722f3..c9185fe35e677 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -137,17 +137,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -157,17 +155,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, @@ -192,17 +188,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -212,17 +206,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new 
InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index c300bfed5f62a..ad2543548dcae 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -59,10 +59,9 @@ public void setUp() throws Exception { @Override protected InternalFilters createTestInstance(String name, Map metadata, InternalAggregations aggregations) { final List buckets = new ArrayList<>(); - for (int i = 0; i < keys.size(); ++i) { - String key = keys.get(i); + for (String key : keys) { int docCount = randomIntBetween(0, 1000); - buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed, keyedBucket)); + buckets.add(new InternalBucket(key, docCount, aggregations)); } return new InternalFilters(name, buckets, keyed, keyedBucket, metadata); } @@ -94,7 +93,7 @@ protected InternalFilters mutateInstance(InternalFilters instance) { case 0 -> name += randomAlphaOfLength(5); case 1 -> { buckets = new ArrayList<>(buckets); - buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed, keyedBucket)); + buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY)); } default -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index 9e6829139d772..5eb1500e37269 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -106,7 +106,7 @@ private InternalDateHistogram createTestInstance( // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { long key = startingDate + intervalMillis * i; - buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations)); + buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -181,13 +181,7 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalDateHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index db93bc5dfe179..f97a836712e36 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -74,7 +74,7 @@ protected InternalHistogram createTestInstance(String name, Map // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { final int docCount = TestUtil.nextInt(random(), 1, 50); - buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations)); + buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -96,7 +96,7 @@ public void testHandlesNaN() { newBuckets.addAll(buckets.subList(0, buckets.size() - 1)); } InternalHistogram.Bucket b = buckets.get(buckets.size() - 1); - newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); + newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, b.format, b.aggregations)); List reduceMe = List.of(histogram, histogram2); InternalAggregationTestCase.reduce(reduceMe, mockReduceContext(mockBuilder(reduceMe)).forPartialReduction()); @@ -171,13 +171,7 @@ protected InternalHistogram mutateInstance(InternalHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java index 5ca78f322491b..dc5b57619676e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java @@ -75,16 +75,7 @@ private InternalIpPrefix createTestInstance( BytesRef key = itr.next(); boolean v6 = InetAddressPoint.decode(key.bytes) instanceof Inet6Address; buckets.add( - new InternalIpPrefix.Bucket( - DocValueFormat.IP, - key, - keyed, - v6, - prefixLength, - appendPrefixLength, - randomLongBetween(0, Long.MAX_VALUE), - aggregations - ) + new InternalIpPrefix.Bucket(key, v6, prefixLength, appendPrefixLength, randomLongBetween(0, Long.MAX_VALUE), aggregations) ); } @@ -126,7 +117,6 @@ protected void assertReduced(InternalIpPrefix reduced, List in Map expectedCounts = new HashMap<>(); for (InternalIpPrefix i : inputs) { for (InternalIpPrefix.Bucket b : i.getBuckets()) { - assertThat(b.getFormat(), equalTo(DocValueFormat.IP)); long acc = expectedCounts.getOrDefault(b.getKey(), 0L); acc += b.getDocCount(); expectedCounts.put(b.getKey(), acc); @@ -146,20 +136,16 @@ protected void assertReduced(InternalIpPrefix reduced, List in public void testPartialReduceNoMinDocCount() { InternalIpPrefix.Bucket b1 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), false, - false, 1, false, 1, InternalAggregations.EMPTY ); InternalIpPrefix.Bucket b2 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("200.0.0.1"))), false, - false, 1, false, 2, diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index b888e61e1bbf9..383065193c4d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -72,7 +72,7 @@ protected InternalBinaryRange createTestInstance( for (int i = 0; i < ranges.size(); ++i) { final int docCount = randomIntBetween(1, 100); final String key = (i == nullKey) ? null : randomAlphaOfLength(10); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); + buckets.add(new InternalBinaryRange.Bucket(format, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); } @@ -113,7 +113,6 @@ protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) { buckets.add( new InternalBinaryRange.Bucket( format, - keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)), new BytesRef(randomAlphaOfLengthBetween(1, 20)), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index 255ad7c4417b3..fdfffaf8fb8e7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -81,7 +81,7 @@ protected InternalDateRange createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalDateRange(name, buckets, format, keyed, metadata); } @@ -105,9 +105,7 @@ protected InternalDateRange mutateInstance(InternalDateRange instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 49144ec2f40fb..dcb41322a9426 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -63,7 +63,7 @@ protected InternalGeoDistance createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed)); + buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations)); } return new 
InternalGeoDistance(name, buckets, keyed, metadata); } @@ -86,9 +86,7 @@ protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false) - ); + buckets.add(new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index da0fbd94d6ed6..0d957255b6416 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalRange<>(name, buckets, format, keyed, metadata); } @@ -100,9 +100,7 @@ protected Class interna buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index 9f667b3efcb61..b2f79c02baf8d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -81,11 +81,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return new Object[0]; @@ -161,11 +156,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return mock(InternalTDigestPercentiles.class); diff --git a/test/fixtures/build.gradle b/test/fixtures/build.gradle index 02d68517903a3..e69de29bb2d1d 100644 --- a/test/fixtures/build.gradle +++ b/test/fixtures/build.gradle @@ -1,9 +0,0 @@ - -subprojects { - // fixtures don't have tests, these are external projects used by the build - pluginManager.withPlugin('java') { - tasks.named('test').configure { - enabled = false - } - } -} diff --git a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java index 375f428f748e6..58f32292fa91c 100644 --- 
a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java +++ b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java @@ -31,6 +31,8 @@ import java.util.List; import java.util.Objects; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; public class S3HttpHandlerTests extends ESTestCase { @@ -261,7 +263,7 @@ public void testListAndAbortMultipartUpload() { path/blob10000false\ """), handleRequest(handler, "GET", "/bucket/?uploads&prefix=path/blob")); - assertEquals(RestStatus.NOT_FOUND, handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" + final var completeUploadResponse = handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" @@ -272,7 +274,13 @@ public void testListAndAbortMultipartUpload() { %s 2 - """, part1Etag, part2Etag)).status()); + """, part1Etag, part2Etag)); + if (completeUploadResponse.status() == RestStatus.OK) { + // possible, but rare, indicating that S3 started processing the upload before returning an error + assertThat(completeUploadResponse.body().utf8ToString(), allOf(containsString(""), containsString("NoSuchUpload"))); + } else { + assertEquals(RestStatus.NOT_FOUND, completeUploadResponse.status()); + } } private static String getUploadId(BytesReference createUploadResponseBody) { diff --git a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java index 40fb4f91c77d0..38c7b1eb04772 100644 --- a/test/framework/src/main/java/org/elasticsearch/node/MockNode.java +++ b/test/framework/src/main/java/org/elasticsearch/node/MockNode.java @@ -31,6 +31,7 @@ import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.plugins.MockPluginsService; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.readiness.MockReadinessService; import org.elasticsearch.readiness.ReadinessService; @@ -279,10 +280,11 @@ private MockNode( final Collection> classpathPlugins, final boolean forbidPrivateIndexSettings ) { - super(NodeConstruction.prepareConstruction(environment, new MockServiceProvider() { + super(NodeConstruction.prepareConstruction(environment, null, new MockServiceProvider() { + @Override - PluginsService newPluginService(Environment environment, Settings settings) { - return new MockPluginsService(settings, environment, classpathPlugins); + PluginsService newPluginService(Environment environment, PluginsLoader pluginsLoader) { + return new MockPluginsService(environment.settings(), environment, classpathPlugins); } }, forbidPrivateIndexSettings)); diff --git a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java index e4734f9cf021e..d51b2cfb450bc 100644 --- a/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java +++ b/test/framework/src/main/java/org/elasticsearch/plugins/MockPluginsService.java @@ -20,7 +20,6 @@ import org.elasticsearch.plugins.spi.SPIClassIterator; import java.lang.reflect.Constructor; -import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -44,14 +43,18 @@ public class MockPluginsService extends PluginsService { * @param classpathPlugins Plugins that 
exist in the classpath which should be loaded */ public MockPluginsService(Settings settings, Environment environment, Collection> classpathPlugins) { - super(settings, environment.configFile(), environment.modulesFile(), environment.pluginsFile()); + super(settings, environment.configFile(), new PluginsLoader(environment.modulesFile(), environment.pluginsFile()) { - final Path configPath = environment.configFile(); + @Override + protected void addServerExportsService(Map> qualifiedExports) { + // tests don't run modular + } + }); List pluginsLoaded = new ArrayList<>(); for (Class pluginClass : classpathPlugins) { - Plugin plugin = loadPlugin(pluginClass, settings, configPath); + Plugin plugin = loadPlugin(pluginClass, settings, environment.configFile()); PluginDescriptor pluginInfo = new PluginDescriptor( pluginClass.getName(), "classpath plugin", @@ -69,7 +72,7 @@ public MockPluginsService(Settings settings, Environment environment, Collection if (logger.isTraceEnabled()) { logger.trace("plugin loaded from classpath [{}]", pluginInfo); } - pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin, pluginClass.getClassLoader(), ModuleLayer.boot())); + pluginsLoaded.add(new LoadedPlugin(pluginInfo, plugin)); } loadExtensions(pluginsLoaded); this.classpathPlugins = List.copyOf(pluginsLoaded); @@ -169,9 +172,4 @@ private static List createExtensions( } return extensions; } - - @Override - protected void addServerExportsService(Map> qualifiedExports) { - // tests don't run modular - } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index d98b51adce615..5b2beaee00bfe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2331,10 +2331,18 @@ public static void safeAwait(CyclicBarrier barrier) { * flag and asserting that the latch is indeed completed before the timeout. */ public static void safeAwait(CountDownLatch countDownLatch) { + safeAwait(countDownLatch, SAFE_AWAIT_TIMEOUT); + } + + /** + * Await on the given {@link CountDownLatch} with a supplied timeout, preserving the thread's interrupt status + * flag and asserting that the latch is indeed completed before the timeout. 
+ */ + public static void safeAwait(CountDownLatch countDownLatch, TimeValue timeout) { try { assertTrue( "safeAwait: CountDownLatch did not reach zero within the timeout", - countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + countDownLatch.await(timeout.millis(), TimeUnit.MILLISECONDS) ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 9ddcf39d24d98..0c466b9162eb8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -138,11 +138,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return inboundException; diff --git a/test/test-clusters/src/main/resources/fips/fips_java.policy b/test/test-clusters/src/main/resources/fips/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/test/test-clusters/src/main/resources/fips/fips_java.policy +++ b/test/test-clusters/src/main/resources/fips/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 0f732d2017c74..c6bfb5b1b2778 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -122,17 +122,16 @@ public InternalAggregations getAggregations() { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), getKey()); builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - if (getShowDocCountError()) { + if (showDocCountError) { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); } aggregations.toXContentInternal(builder, params); builder.endObject(); 
- return builder; } @Override @@ -589,7 +588,7 @@ public List getBuckets() { @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } @Override diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java index 0c6e94a15ec36..1691aedf543f4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsAggregator.java @@ -264,6 +264,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw continue; } if (spare == null) { + checkRealMemoryCBForInternalBucket(); spare = new InternalMultiTerms.Bucket(null, 0, null, showTermDocCountError, 0, formats, keyConverters); spareKey = new BytesRef(); } @@ -287,11 +288,10 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, a) -> b.aggregations = a); - InternalAggregation[] result = new InternalAggregation[Math.toIntExact(owningBucketOrds.size())]; - for (int ordIdx = 0; ordIdx < result.length; ordIdx++) { - result[ordIdx] = buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)); - } - return result; + return buildAggregations( + Math.toIntExact(owningBucketOrds.size()), + ordIdx -> buildResult(otherDocCounts.get(ordIdx), topBucketsPerOrd.get(ordIdx)) + ); } } diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle index da39d221f92f1..ac8ce1b0fd331 100644 --- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle +++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle @@ -1,5 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -11,6 +15,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register("leader-cluster") { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -24,7 +30,19 @@ def followCluster = testClusters.register("follow-cluster") { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + 
it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -41,7 +59,7 @@ tasks.register("writeJavaPolicy") { policyFile.write( [ "grant {", - " permission java.io.FilePermission \"${-> testClusters."follow-cluster".getFirstNode().getServerLog()}\", \"read\";", + " permission java.io.FilePermission \"${-> followCluster.map { it.getFirstNode().getServerLog() }.get()}\", \"read\";", "};" ].join("\n") ) @@ -50,11 +68,28 @@ tasks.register("writeJavaPolicy") { tasks.register("follow-cluster", RestIntegTestTask) { dependsOn 'writeJavaPolicy', "leader-cluster" - useCluster leaderCluster - systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) + useCluster leaderCluster + systemProperty 'tests.target_cluster', 'follow' + nonInputProperties.systemProperty 'java.security.policy', "file://${policyFile}" + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllHttpSocketURI().get(0) } + def followerUri = followInfo.map { it.getAllHttpSocketURI().get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'log', followCluster.map(c -> c.getFirstNode().getServerLog()) } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index 2475a56aa87aa..86abbbbeedf6b 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -1,6 +1,10 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -12,6 +16,7 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -21,12 +26,23 @@ def leaderCluster = testClusters.register('leader-cluster') { } def middleCluster = testClusters.register('middle-cluster') { - testDistribution = 'DEFAULT' - setting 
'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - user username: 'admin', password: 'admin-password', role: 'superuser' - setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register("leader-cluster", RestIntegTestTask) { @@ -40,30 +56,74 @@ tasks.register("middle-cluster", RestIntegTestTask) { useCluster testClusters.named("leader-cluster") systemProperty 'tests.target_cluster', 'middle' systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host',leaderCluster.map(c -> c.allHttpSocketURI.get(0)) -} + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', leaderUri +} tasks.register('follow-cluster', RestIntegTestTask) { dependsOn "leader-cluster", "middle-cluster" - useCluster leaderCluster - useCluster middleCluster - systemProperty 'tests.target_cluster', 'follow' - systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c -> c.allHttpSocketURI.get(0)) - nonInputProperties.systemProperty 'tests.middle_host', middleCluster.map(c -> c.allHttpSocketURI.get(0)) + useCluster leaderCluster + useCluster middleCluster + systemProperty 'tests.target_cluster', 'follow' + systemProperty 'tests.leader_cluster_repository_path', "${buildDir}/cluster/shared/repo/leader-cluster" + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def middleUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.middle_host', middleUri } -testClusters.matching {it.name == "follow-cluster" }.configureEach { +testClusters.matching { it.name == "follow-cluster" }.configureEach { 
testDistribution = 'DEFAULT' setting 'xpack.monitoring.collection.enabled', 'true' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + + def middleUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("middle-cluster") + it.parameters.service = serviceProvider + }.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE setting 'cluster.remote.middle_cluster.seeds', - { "\"${middleCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${middleUris.get().join(",")}\"" }, IGNORE_VALUE } diff --git a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle index 7661ea08b057d..ff342accef277 100644 --- a/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle +++ b/x-pack/plugin/ccr/qa/non-compliant-license/build.gradle @@ -1,5 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -10,6 +14,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa:') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.security.enabled', 'true' @@ -21,8 +27,20 @@ def followerCluster = testClusters.register('follow-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUris = leaderInfo.map { it.getAllTransportPortURI() } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().join(",")}\"" }, IGNORE_VALUE + { "\"${leaderUris.get().join(",")}\"" }, IGNORE_VALUE } tasks.register('leader-cluster', RestIntegTestTask) { @@ -34,7 +52,19 @@ tasks.register('follow-cluster', RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', followerCluster.map(c -> c.allHttpSocketURI.get(0)) + + Provider serviceProvider = 
GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def followInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + } + def followUri = followInfo.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', followUri } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ccr/qa/restart/build.gradle b/x-pack/plugin/ccr/qa/restart/build.gradle index 47d37801e2dcf..848beb1da10ae 100644 --- a/x-pack/plugin/ccr/qa/restart/build.gradle +++ b/x-pack/plugin/ccr/qa/restart/build.gradle @@ -1,6 +1,10 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -9,6 +13,8 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leaderCluster = testClusters.register('leader-cluster') { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' @@ -22,12 +28,23 @@ def followCluster = testClusters.register('follow-cluster') { setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' user username: 'admin', password: 'admin-password', role: 'superuser' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderInfo = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + } + def leaderUri = leaderInfo.map { it.getAllTransportPortURI().get(0) } + setting 'cluster.remote.leader_cluster.seeds', - { "\"${leaderCluster.get().getAllTransportPortURI().get(0)}\"" }, IGNORE_VALUE + { "\"${leaderUri.get()}\"" }, IGNORE_VALUE nameCustomization = { 'follow' } } - tasks.register('leader-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.target_cluster', 'leader' @@ -37,8 +54,19 @@ tasks.register('follow-cluster', RestIntegTestTask) { dependsOn 'leader-cluster' useCluster leaderCluster systemProperty 'tests.target_cluster', 'follow' + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + nonInputProperties.systemProperty 'tests.leader_host', - "${-> leaderCluster.get().getAllHttpSocketURI().get(0)}" + "${-> leaderUri.get() }" } tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { @@ -48,10 +76,27 @@ tasks.register("followClusterRestartTest", StandaloneRestIntegTestTask) { systemProperty 'tests.rest.load_packaged', 'false' systemProperty 
'tests.target_cluster', 'follow-restart' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + def followUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("follow-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.join(",") } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri + nonInputProperties.systemProperty 'tests.rest.cluster', followUris + doFirst { - followCluster.get().restart() - nonInputProperties.systemProperty 'tests.leader_host', leaderCluster.map(c-> c.getAllHttpSocketURI().get(0)) - nonInputProperties.systemProperty 'tests.rest.cluster', followCluster.map(c -> c.getAllHttpSocketURI().join(",")) + serviceProvider.get().restart(clusterPath, "follow-cluster") } } diff --git a/x-pack/plugin/ccr/qa/security/build.gradle b/x-pack/plugin/ccr/qa/security/build.gradle index 5515aefeaa091..454a9ae721736 100644 --- a/x-pack/plugin/ccr/qa/security/build.gradle +++ b/x-pack/plugin/ccr/qa/security/build.gradle @@ -1,4 +1,9 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils + import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE apply plugin: 'elasticsearch.internal-testclusters' @@ -10,26 +15,38 @@ dependencies { testImplementation project(':x-pack:plugin:ccr:qa') } +def clusterPath = getPath() + def leadCluster = testClusters.register('leader-cluster') { - testDistribution = 'DEFAULT' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - extraConfigFile 'roles.yml', file('leader-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + extraConfigFile 'roles.yml', file('leader-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } testClusters.register('follow-cluster') { - testDistribution = 'DEFAULT' - setting 'cluster.remote.leader_cluster.seeds', { - "\"${leadCluster.get().getAllTransportPortURI().join(",")}\"" - }, IGNORE_VALUE - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.security.enabled', 'true' - setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests - extraConfigFile 'roles.yml', file('follower-roles.yml') - user username: "test_admin", role: "superuser" - user username: "test_ccr", role: "ccruser" + testDistribution = 'DEFAULT' + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUris = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.AllTransportPortURI } + + setting 
'cluster.remote.leader_cluster.seeds', { + "\"${leaderUris.get().join(",")}\"" + }, IGNORE_VALUE + setting 'xpack.license.self_generated.type', 'trial' + setting 'xpack.security.enabled', 'true' + setting 'xpack.monitoring.collection.enabled', 'false' // will be enabled by tests + extraConfigFile 'roles.yml', file('follower-roles.yml') + user username: "test_admin", role: "superuser" + user username: "test_ccr", role: "ccruser" } tasks.register('leader-cluster', RestIntegTestTask) { @@ -41,7 +58,17 @@ def followerClusterTestTask = tasks.register('follow-cluster', RestIntegTestTask dependsOn 'leader-cluster' useCluster leadCluster systemProperty 'tests.target_cluster', 'follow' - nonInputProperties.systemProperty 'tests.leader_host', leadCluster.map(c-> c.getAllHttpSocketURI().get(0)) + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + def leaderUri = project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("leader-cluster") + it.parameters.service = serviceProvider + }.map { it.allHttpSocketURI.get(0) } + + nonInputProperties.systemProperty 'tests.leader_host', leaderUri } tasks.named("check").configure { dependsOn(followerClusterTestTask) } diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index b4f17cb436df5..d4c3f67bf3ebb 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -65,7 +65,7 @@ dependencies { testImplementation project(path: ':modules:rest-root') testImplementation project(path: ':modules:health-shards-availability') // Needed for Fips140ProviderVerificationTests - testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4') + testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5') testImplementation(project(':x-pack:license-tools')) { transitive = false diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index cd44aaafbfae2..05eb7551330b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -130,11 +130,11 @@ public boolean equals(Object obj) { * still result in unique snapshot names. 
*/ public static String generateSnapshotName(String name) { - return generateSnapshotName(name, new IndexNameExpressionResolver.ResolverContext()); + return generateSnapshotName(name, System.currentTimeMillis()); } - public static String generateSnapshotName(String name, IndexNameExpressionResolver.Context context) { - String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, context.getStartTime()); + public static String generateSnapshotName(String name, long now) { + String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, now); // TODO: we are breaking the rules of UUIDs by lowercasing this here, find an alternative (snapshot names must be lowercase) return candidate + "-" + UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java index 47f7fea8dc199..6e78c2e8d3ef3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java @@ -184,7 +184,7 @@ public void checkForDeprecations(String id, NamedXContentRegistry namedXContentR onDeprecation.accept( // max_page_search_size got deprecated in 7.8, still accepted for 8.x, to be removed in 9.x new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.01-")); - 
assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019-03-15.21:09:00-")); + assertThat(generateSnapshotName("", time), startsWith("name-2019-03-15.21:09:00-")); } public void testNameValidation() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java index bbf80279b0b2a..60db8b6522518 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java @@ -218,7 +218,7 @@ public void testThatDelegateTrustManagerIsRespected() throws Exception { if (cert.endsWith("/ca")) { assertTrusted(trustManager, cert); } else { - assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to find certificate chain." : "PKIX path building failed.*"); + assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to construct a valid chain" : "PKIX path building failed.*"); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index f1c2de11496bf..8cfecc432c661 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -930,7 +930,7 @@ public void testCheckForDeprecations() { equalTo( Collections.singletonList( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -952,7 +952,7 @@ public void testCheckForDeprecations() { equalTo( List.of( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -982,7 +982,7 @@ public void testCheckForDeprecations() { null ), new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java index aa200f7ae9acb..214f9150dfcc5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java @@ -32,7 +32,7 @@ protected final BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest r // We need to consume parameters and content from the REST request in order to bypass unrecognized param errors // and return a license error. 
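// Illustrative sketch only, not part of this patch (it assumes a RestRequest named `request` and a
// prepared RestResponse named `licenseError`): the consume-then-reject pattern described in the
// comment above marks every parameter and the request body as consumed before short-circuiting,
// so the framework does not fail the request over unrecognized or unused parameters.
//
//     for (String key : request.params().keySet()) {
//         request.param(key, "");      // reading the parameter marks it as consumed
//     }
//     request.releasableContent();     // touch the body so it also counts as consumed
//     return channel -> channel.sendResponse(licenseError);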
request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); return channel -> channel.sendResponse( new RestResponse(channel, LicenseUtils.newComplianceException(this.licenseState, this.product)) ); diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java deleted file mode 100644 index 95000a5364e12..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; - -import java.io.IOException; -import java.util.List; -import java.util.Map; - -import static java.util.Collections.emptyList; - -public final class StringQueryPredicate extends FullTextPredicate { - - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( - Expression.class, - "StringQueryPredicate", - StringQueryPredicate::new - ); - - private final Map fields; - - public StringQueryPredicate(Source source, String query, String options) { - super(source, query, options, emptyList()); - - // inferred - this.fields = FullTextUtils.parseFields(optionMap(), source); - } - - StringQueryPredicate(StreamInput in) throws IOException { - super(in); - assert super.children().isEmpty(); - this.fields = FullTextUtils.parseFields(optionMap(), source()); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, StringQueryPredicate::new, query(), options()); - } - - @Override - public Expression replaceChildren(List newChildren) { - throw new UnsupportedOperationException("this type of node doesn't have any children to replace"); - } - - public Map fields() { - return fields; - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java index b6383fac33299..7836522c77130 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java @@ -12,7 +12,6 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import 
org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; @@ -26,7 +25,6 @@ import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery; import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -73,18 +71,6 @@ private static Query translateField(RegexMatch e, String targetFieldName) { } } - public static class StringQueries extends ExpressionTranslator { - - @Override - protected Query asQuery(StringQueryPredicate q, TranslatorHandler handler) { - return doTranslate(q, handler); - } - - public static Query doTranslate(StringQueryPredicate q, TranslatorHandler handler) { - return new QueryStringQuery(q.source(), q.query(), q.fields(), q); - } - } - public static class MultiMatches extends ExpressionTranslator { @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java index 8ac90e6314174..8dcb87749ae48 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java @@ -14,7 +14,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import java.util.Collections; @@ -55,20 +54,13 @@ public class QueryStringQuery extends Query { private final String query; private final Map fields; - private StringQueryPredicate predicate; private final Map options; - // dedicated constructor for QueryTranslator - public QueryStringQuery(Source source, String query, String fieldName) { - this(source, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null); - } - - public QueryStringQuery(Source source, String query, Map fields, StringQueryPredicate predicate) { + public QueryStringQuery(Source source, String query, Map fields, Map options) { super(source); this.query = query; this.fields = fields; - this.predicate = predicate; - this.options = predicate == null ? Collections.emptyMap() : predicate.optionMap(); + this.options = options == null ? 
Collections.emptyMap() : options; } @Override @@ -95,7 +87,7 @@ public String query() { @Override public int hashCode() { - return Objects.hash(query, fields, predicate); + return Objects.hash(query, fields); } @Override @@ -109,7 +101,7 @@ public boolean equals(Object obj) { } QueryStringQuery other = (QueryStringQuery) obj; - return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(predicate, other.predicate); + return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(options, other.options); } @Override diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java index 0f80011961092..22e7b93e84ce1 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java @@ -10,42 +10,40 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; public class QueryStringQueryTests extends ESTestCase { public void testQueryBuilding() { - QueryStringQueryBuilder qb = getBuilder("lenient=true"); + QueryStringQueryBuilder qb = getBuilder(Map.of("lenient", "true")); assertThat(qb.lenient(), equalTo(true)); - qb = getBuilder("lenient=true;default_operator=AND"); + qb = getBuilder(Map.of("lenient", "true", "default_operator", "AND")); assertThat(qb.lenient(), equalTo(true)); assertThat(qb.defaultOperator(), equalTo(Operator.AND)); - Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy")); + Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy"))); assertThat(e.getMessage(), equalTo("illegal query_string option [pizza]")); - e = expectThrows(ElasticsearchParseException.class, () -> getBuilder("type=aoeu")); + e = expectThrows(ElasticsearchParseException.class, () -> getBuilder(Map.of("type", "aoeu"))); assertThat(e.getMessage(), equalTo("failed to parse [multi_match] query type [aoeu]. 
unknown type.")); } - private static QueryStringQueryBuilder getBuilder(String options) { + private static QueryStringQueryBuilder getBuilder(Map options) { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", options); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); - return (QueryStringQueryBuilder) mmq.asBuilder(); + final QueryStringQuery query = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), options); + return (QueryStringQueryBuilder) query.asBuilder(); } public void testToString() { final Source source = new Source(1, 1, StringUtils.EMPTY); - final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", ""); - final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp); + final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), Map.of()); assertEquals("QueryStringQuery@1:2[{foo=1.0}:eggplant]", mmq.toString()); } } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index 716c679b2fc1a..397a6d03dd309 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -85,7 +85,6 @@ tasks.named("test").configure { } } File functionsFolder = file("build/testrun/test/temp/esql/functions") - File signatureFolder = file("build/testrun/test/temp/esql/functions/signature") File typesFolder = file("build/testrun/test/temp/esql/functions/types") def functionsDocFolder = file("${rootDir}/docs/reference/esql/functions") def effectiveProjectDir = projectDir diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index dcd13671670d8..d7d9da052a962 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -191,18 +191,29 @@ SegmentState segmentState(int segment) throws IOException { private class SegmentState { private final Weight weight; private final LeafReaderContext ctx; + /** * Lazily initialed {@link Scorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private Scorer scorer; + /** + * Thread that initialized the {@link #scorer}. + */ + private Thread scorerThread; + /** * Lazily initialed {@link BulkScorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private BulkScorer bulkScorer; + /** + * Thread that initialized the {@link #bulkScorer}. + */ + private Thread bulkScorerThread; + /** * Set to {@code true} if, in the process of building a {@link Scorer} or {@link BulkScorer}, * the {@link Weight} tells us there aren't any matches. 
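For context on the next two hunks: a Lucene Scorer or BulkScorer built from a Weight is not safe to share across threads, so the evaluator now records which thread built the cached instance and rebuilds it whenever a different driver thread picks up the segment. A minimal standalone sketch of that caching pattern follows; the class and method names here are assumptions for illustration, not part of the patch.

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;

    import java.io.IOException;

    /**
     * Sketch of a per-segment scorer cache that is rebuilt whenever it is
     * accessed from a thread other than the one that created it.
     */
    class PerThreadScorerCache {
        private final Weight weight;
        private final LeafReaderContext ctx;
        private Scorer scorer;       // lazily built; only valid on scorerThread
        private Thread scorerThread; // thread that built the cached scorer

        PerThreadScorerCache(Weight weight, LeafReaderContext ctx) {
            this.weight = weight;
            this.ctx = ctx;
        }

        /** Returns a scorer positioned no later than {@code minDocId}, rebuilding it if needed. */
        Scorer scorer(int minDocId) throws IOException {
            if (scorer == null                               // never built
                || scorerThread != Thread.currentThread()    // built on a different thread
                || scorer.iterator().docID() > minDocId) {   // iterator already past the requested doc
                scorerThread = Thread.currentThread();
                scorer = weight.scorer(ctx);                 // may be null when the segment has no matches
            }
            return scorer;
        }
    }

The hunks below apply the same bookkeeping to both the Scorer used in initScorer and the BulkScorer used in scoreDense.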
@@ -223,7 +234,10 @@ BooleanVector scoreDense(int min, int max) throws IOException { if (noMatch) { return blockFactory.newConstantBooleanVector(false, length); } - if (bulkScorer == null) { + if (bulkScorer == null || // The bulkScorer wasn't initialized + Thread.currentThread() != bulkScorerThread // The bulkScorer was initialized on a different thread + ) { + bulkScorerThread = Thread.currentThread(); bulkScorer = weight.bulkScorer(ctx); if (bulkScorer == null) { noMatch = true; @@ -257,8 +271,11 @@ private void initScorer(int minDocId) throws IOException { if (noMatch) { return; } - if (scorer == null || scorer.iterator().docID() > minDocId) { - // The previous block might have been beyond this one, reset the scorer and try again. + if (scorer == null || // Scorer not initialized + scorerThread != Thread.currentThread() || // Scorer initialized on a different thread + scorer.iterator().docID() > minDocId // The previous block came "after" this one + ) { + scorerThread = Thread.currentThread(); scorer = weight.scorer(ctx); if (scorer == null) { noMatch = true; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index e3fc0e26e34e0..4baaf9ad89bd6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -220,20 +220,21 @@ void onSinkComplete() { * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ public void addRemoteSink(RemoteSink remoteSink, int instances) { - for (int i = 0; i < instances; i++) { - var fetcher = new RemoteSinkFetcher(remoteSink); - fetchExecutor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - fetcher.onSinkFailed(e); - } + fetchExecutor.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + failure.unwrapAndCollect(e); + buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading + } - @Override - protected void doRun() { + @Override + protected void doRun() { + for (int i = 0; i < instances; i++) { + var fetcher = new RemoteSinkFetcher(remoteSink); fetcher.fetchPage(); } - }); - } + } + }); } /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index adbf24cee10b0..1e23cf62917fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,6 @@ avg(salary):double | always_false:boolean in -required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec index 3be3decaf351c..7bbf011176693 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/bucket.csv-spec @@ -760,3 +760,19 @@ c:long |b:date 3 |2025-10-01T00:00:00.000Z 4 |2023-11-01T00:00:00.000Z ; + +bucketWithFilteredCountRefingBucket 
+required_capability: implicit_casting_string_literal_to_temporal_amount + +FROM employees +| STATS c = COUNT(*) WHERE b > "1953-01-01T00:00:00.000Z" AND emp_no > 10020 BY b = BUCKET(birth_date, 1 year) +| SORT c, b +| LIMIT 4 +; + +c:long |b:date +0 |1952-01-01T00:00:00.000Z +0 |1953-01-01T00:00:00.000Z +0 |null +1 |1965-01-01T00:00:00.000Z +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 7e7c561fac3a5..734e2ef5e475e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,6 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warningRegex:Line 1:58: evaluation of \[to_datetime\(ul\)\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 3c38bd190b0b1..25b114b5d1daf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -580,7 +580,6 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_capability: enrich_load -required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 537b69547c6be..3505b52e5599e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -99,7 +99,6 @@ int:integer |dbl:double ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -115,7 +114,6 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -131,7 +129,6 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -143,7 +140,6 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. 
@@ -156,7 +152,6 @@ emp_no:integer |salary_change:double ; inMultivalue -required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -169,7 +164,6 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -185,7 +179,6 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -201,7 +194,6 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change == 1.19.*\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index b399734151412..f4b6d41a7a027 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,6 @@ // Integral types-specific tests inLongAndInt -required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warningRegex:evaluation of \[avg_worked_seconds in \(372957040, salary_change.long, 236703986\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ long:long |ul:ul ; convertDoubleToUL -required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. @@ -127,7 +125,6 @@ int:integer |long:long ; convertULToLong -required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +167,6 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +186,6 @@ int:integer |ii:integer ; convertLongToInt -required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +202,6 @@ long:long |int:integer ; convertULToInt -required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -239,7 +233,6 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warningRegex:Line 1:79: evaluation of \[to_integer\(str1\)\] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +247,6 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +257,6 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +272,6 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +287,6 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int == 0\] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +301,6 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int\] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +313,6 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int in \(1, 7\)\] failed, treating result as null. Only first 20 failures recorded. @@ -338,7 +325,6 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -354,7 +340,6 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -370,7 +355,6 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int == 1.*\] failed, treating result as null. 
Only first 20 failures recorded diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 0fb6994ef759f..4418f7e0aa7ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 == ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +59,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 < ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +71,6 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 != ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -125,7 +122,6 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +142,6 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +163,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +182,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warningRegex:evaluation of \[cidr_match\(ip1, \\\"127.0.0.2/32\\\"\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -199,7 +192,6 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +205,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +224,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +236,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -366,7 +355,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -380,7 +368,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index da069836504d4..2fe2feb3bc219 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -214,8 +214,6 @@ height:double | s:double ; powSalarySquared -required_capability: pow_double - from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; salary:integer | s:double @@ -631,8 +629,6 @@ base:double | exponent:integer | result:double ; powIntInt -required_capability: pow_double - ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) ; @@ -642,8 +638,6 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_capability: pow_double - row s = 1 + pow(2, 2); s:double @@ -658,8 +652,6 @@ s:double ; powIntUL -required_capability: pow_double - row x = pow(1, 9223372036854775808); x:double @@ -667,8 +659,6 @@ x:double ; powLongUL -required_capability: pow_double - row x = to_long(1) | eval x = pow(x, 9223372036854775808); x:double @@ -676,8 +666,6 @@ x:double ; powUnsignedLongUL -required_capability: pow_double - row x = to_ul(1) | eval x = pow(x, 9223372036854775808); x:double @@ -701,8 +689,6 @@ null ; powULInt -required_capability: pow_double - row x = pow(to_unsigned_long(9223372036854775807), 1); x:double @@ -710,8 +696,6 @@ x:double ; powULIntOverrun -required_capability: pow_double - ROW x = POW(9223372036854775808, 2) ; @@ -732,8 +716,6 @@ x:double ; powULLong -required_capability: pow_double - row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); x:double @@ -741,8 +723,6 @@ x:double ; powULLongOverrun -required_capability: pow_double - row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); x:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 01e7258e8a6ee..ac9948c90f5e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,6 @@ 
############################################### convertFromStringQuantize -required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index 7a046786a4f19..66c5362a24134 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2348,6 +2348,42 @@ v:integer | job_positions:keyword 10094 | Accountant ; +docsStatsWithSimpleFiltering +required_capability: per_agg_filtering +// tag::aggFiltering[] +FROM employees +| STATS avg50s = AVG(salary)::LONG WHERE birth_date < "1960-01-01", + avg60s = AVG(salary)::LONG WHERE birth_date >= "1960-01-01" + BY gender +| SORT gender +// end::aggFiltering[] +| WHERE gender IS NOT NULL +; + +// tag::aggFiltering-result[] +avg50s:long |avg60s:long |gender:keyword +55462 |46637 |F +48279 |44879 |M +// end::aggFiltering-result[] +; + +docsStatsWithFilteringNoGroups +required_capability: per_agg_filtering +// tag::aggFilteringNoGroup[] +FROM employees +| EVAL Ks = salary / 1000 // thousands +| STATS under_40K = COUNT(*) WHERE Ks < 40, + inbetween = COUNT(*) WHERE 40 <= Ks AND Ks < 60, + over_60K = COUNT(*) WHERE 60 <= Ks, + total = COUNT(*) +// end::aggFilteringNoGroup[] +; + +// tag::aggFilteringNoGroup-result[] +under_40K:long |inbetween:long |over_60K:long |total:long +36 |39 |25 |100 +// end::aggFilteringNoGroup-result[] +; statsWithFiltering required_capability: per_agg_filtering @@ -2642,6 +2678,26 @@ c2:l |c2_f:l |m2:i |m2_f:i |c:l 1 |1 |5 |5 |21 ; +simpleCountOnFieldWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(emp_no) where emp_no < 10042 +; + +c1:long +41 +; + +simpleCountOnStarWithFilteringAndNoGrouping +required_capability: per_agg_filtering +from employees +| stats c1 = count(*) where emp_no < 10042 +; + +c1:long +41 +; + commonFilterExtractionWithAliasing required_capability: per_agg_filtering from employees diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 963245f9f0ea6..e103168d2e589 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -390,7 +390,6 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -582,7 +581,6 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions < \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -595,7 +593,6 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[job_positions > \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. 
@@ -612,7 +609,6 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions == \\\"Accountant\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -624,7 +620,6 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions\] failed, treating result as null. Only first 20 failures recorded. @@ -637,7 +632,6 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions in \(\\\"Accountant\\\", \\"Tech Lead\\\"\)\] failed, treating result as null. Only first 20 failures recorded. @@ -650,7 +644,6 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions < \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -667,7 +660,6 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions > \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -680,7 +672,6 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions == \\\"Accountant\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -937,7 +928,6 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -951,7 +941,6 @@ null | 19 ; startsWithText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -965,7 +954,6 @@ false | null ; substringOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -979,7 +967,6 @@ Gatew | null ; concatOfText -required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. 
@@ -1518,7 +1505,6 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec index ad6dd4fecc3f7..af987b13acc82 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/union_types.csv-spec @@ -718,6 +718,7 @@ count:long | @timestamp:date multiIndexTsNanosToDatetimeStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data, sample_data_ts_nanos | EVAL @timestamp = DATE_TRUNC(1 hour, TO_DATETIME(@timestamp)) @@ -754,6 +755,7 @@ multiIndexTsLongStatsDrop required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -774,6 +776,7 @@ multiIndexTsLongStatsInline2 required_capability: union_types required_capability: union_types_agg_cast required_capability: casting_operator +required_capability: to_date_nanos FROM sample_data, sample_data_ts_long, sample_data_ts_nanos | STATS count=count(*) BY @timestamp::datetime @@ -917,6 +920,7 @@ multiIndexIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -958,6 +962,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL @timestamp = TO_DATETIME(@timestamp), client_ip = TO_IP(client_ip) @@ -1000,6 +1005,7 @@ multiIndexIpStringTsLongRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1041,6 +1047,7 @@ sample_data_ts_nanos | 2023-10-23T12:15:03.360Z | 172.21.2.162 | 3450233 multiIndexIpStringTsLongRenameDropped required_capability: union_types required_capability: metadata_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_DATETIME(@timestamp), host_ip = TO_IP(client_ip) @@ -1083,6 +1090,7 @@ multiIndexIpStringTsLongRenameToString required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | EVAL ts = TO_STRING(TO_DATETIME(@timestamp)), host_ip = TO_STRING(TO_IP(client_ip)) @@ -1125,6 +1133,7 @@ multiIndexWhereIpStringTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ 
-1141,6 +1150,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) == "172.21.2.162" @@ -1157,6 +1167,7 @@ multiIndexWhereIpStringLikeTsLong required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1173,6 +1184,7 @@ sample_data_ts_long | 3450233 | Connected to 10.1.0.3 multiIndexWhereIpStringLikeTsLongStats required_capability: union_types required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* | WHERE TO_LONG(@timestamp) < 1698068014937 AND TO_STRING(client_ip) LIKE "172.21.2.16?" @@ -1189,6 +1201,7 @@ multiIndexMultiColumnTypesRename required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1207,6 +1220,7 @@ multiIndexMultiColumnTypesRenameAndKeep required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 @@ -1226,6 +1240,7 @@ multiIndexMultiColumnTypesRenameAndDrop required_capability: union_types required_capability: metadata_fields required_capability: union_types_remove_fields +required_capability: to_date_nanos FROM sample_data* METADATA _index | WHERE event_duration > 8000000 diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 03d0b71894d9b..fbddb3d0e6989 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,6 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in >= to_ul\(74330435873664882\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownRange -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in .* to_ul\(.*\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -82,7 +80,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownIn -required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -96,7 +93,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsEquality -required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warningRegex:evaluation of \[bytes_in == bytes_out\] failed, treating result as null. Only first 20 failures recorded. @@ -107,7 +103,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsInequality -required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warningRegex:evaluation of \[bytes_in < bytes_out\] failed, treating result as null. Only first 20 failures recorded. @@ -138,7 +133,6 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warningRegex:evaluation of \[bytes_in == to_ul\(154551962150890564\)\] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +143,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toDegrees -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -161,7 +154,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toRadians -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java new file mode 100644 index 0000000000000..3a69983a0d86e --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlNodeFailureIT.java @@ -0,0 +1,116 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Make sure the failures on the data node come back as failures over the wire. + */ +@ESIntegTestCase.ClusterScope(minNumDataNodes = 2) +public class EsqlNodeFailureIT extends AbstractEsqlIntegTestCase { + @Override + protected Collection<Class<? extends Plugin>> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), FailingFieldPlugin.class); + } + + /** + * Use a runtime field that fails when loading field values to fail the entire query. + */ + public void testFailureLoadingFields() throws IOException { + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("fail_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "fail").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + client().admin().indices().prepareCreate("fail").setSettings(indexSettings(1, 0)).setMapping(mapping.endObject()).get(); + + int docCount = 100; + List<IndexRequestBuilder> docs = new ArrayList<>(docCount); + for (int d = 0; d < docCount; d++) { + docs.add(client().prepareIndex("ok").setSource("foo", d)); + } + docs.add(client().prepareIndex("fail").setSource("foo", 0)); + indexRandom(true, docs); + + ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> run("FROM fail,ok | LIMIT 100").close()); + assertThat(e.getMessage(), equalTo("test failure")); + } + + public static class FailingFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "fail"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + throw new ElasticsearchException("test failure"); + } + }; + } + }; + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java
b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java index b86c46fd3fa7a..3b647583f1129 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/plugin/MatchOperatorIT.java @@ -14,9 +14,6 @@ import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.AbstractEsqlIntegTestCase; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; -import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; -import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.junit.Before; import java.util.List; @@ -32,12 +29,6 @@ public void setupIndex() { createAndPopulateIndex(); } - @Override - protected EsqlQueryResponse run(EsqlQueryRequest request) { - assumeTrue("match operator capability not available", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - return super.run(request); - } - public void testSimpleWhereMatch() { var query = """ FROM test diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index b2f0e2942d3cc..ef875d7ca01d8 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -112,8 +112,6 @@ WS : [ \r\n\t]+ -> channel(HIDDEN) ; -COLON : ':'; - // // Expression - used by most command // @@ -184,6 +182,7 @@ AND : 'and'; ASC : 'asc'; ASSIGN : '='; CAST_OP : '::'; +COLON : ':'; COMMA : ','; DESC : 'desc'; DOT : '.'; @@ -216,7 +215,6 @@ MINUS : '-'; ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; -EXPRESSION_COLON : {this.isDevVersion()}? COLON -> type(COLON); NESTED_WHERE : WHERE -> type(WHERE); diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index 16376d6863b2f..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -26,16 +26,16 @@ UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 WS=28 -COLON=29 -PIPE=30 -QUOTED_STRING=31 -INTEGER_LITERAL=32 -DECIMAL_LITERAL=33 -BY=34 -AND=35 -ASC=36 -ASSIGN=37 -CAST_OP=38 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 COMMA=39 DESC=40 DOT=41 @@ -142,13 +142,13 @@ CLOSING_METRICS_WS=128 'sort'=14 'stats'=15 'where'=16 -':'=29 -'|'=30 -'by'=34 -'and'=35 -'asc'=36 -'='=37 -'::'=38 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 ','=39 'desc'=40 '.'=41 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 33c4d8957d387..f84cfe3060503 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -69,7 +69,7 @@ booleanExpression | left=booleanExpression operator=OR right=booleanExpression #logicalBinary | valueExpression (NOT)? IN LP valueExpression (COMMA valueExpression)* RP #logicalIn | valueExpression IS NOT? NULL #isNull - | {this.isDevVersion()}? 
matchBooleanExpression #matchExpression + | matchBooleanExpression #matchExpression ; regexBooleanExpression diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index 16376d6863b2f..b1a16987dd8ce 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -26,16 +26,16 @@ UNKNOWN_CMD=25 LINE_COMMENT=26 MULTILINE_COMMENT=27 WS=28 -COLON=29 -PIPE=30 -QUOTED_STRING=31 -INTEGER_LITERAL=32 -DECIMAL_LITERAL=33 -BY=34 -AND=35 -ASC=36 -ASSIGN=37 -CAST_OP=38 +PIPE=29 +QUOTED_STRING=30 +INTEGER_LITERAL=31 +DECIMAL_LITERAL=32 +BY=33 +AND=34 +ASC=35 +ASSIGN=36 +CAST_OP=37 +COLON=38 COMMA=39 DESC=40 DOT=41 @@ -142,13 +142,13 @@ CLOSING_METRICS_WS=128 'sort'=14 'stats'=15 'where'=16 -':'=29 -'|'=30 -'by'=34 -'and'=35 -'asc'=36 -'='=37 -'::'=38 +'|'=29 +'by'=33 +'and'=34 +'asc'=35 +'='=36 +'::'=37 +':'=38 ','=39 'desc'=40 '.'=41 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 19f6276bf99d0..2bc31ff5696cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -140,6 +140,12 @@ public enum Cap { */ CASE_MV, + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + ENRICH_LOAD, + /** * Optimization for ST_CENTROID changed some results in cartesian data. #108713 */ @@ -307,7 +313,7 @@ public enum Cap { /** * Support for match operator as a colon. Previous support for match operator as MATCH has been removed */ - MATCH_OPERATOR_COLON(Build.current().isSnapshot()), + MATCH_OPERATOR_COLON, /** * Removing support for the {@code META} keyword. @@ -430,6 +436,12 @@ public enum Cap { */ SORTING_ON_SOURCE_AND_COUNTERS_FORBIDDEN, + /** + * Fix {@code SORT} when the {@code _source} field is not a sort key but + * is being returned. + */ + SORT_RETURNING_SOURCE_OK, + /** * Allow filter per individual aggregation. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index d7c3e261c3167..2be13398dab2f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -216,6 +216,7 @@ else if (p instanceof Lookup lookup) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); checkForSortableDataTypes(p, failures); + checkSort(p, failures); checkFullTextQueryFunctions(p, failures); }); @@ -233,6 +234,18 @@ else if (p instanceof Lookup lookup) { return failures; } + private void checkSort(LogicalPlan p, Set<Failure> failures) { + if (p instanceof OrderBy ob) { + ob.order().forEach(o -> { + o.forEachDown(Function.class, f -> { + if (f instanceof AggregateFunction) { + failures.add(fail(f, "Aggregate functions are not allowed in SORT [{}]", f.functionName())); + } + }); + }); + } + } + private static void checkFilterConditionType(LogicalPlan p, Set<Failure> localFailures) { if (p instanceof Filter f) { Expression condition = f.condition(); @@ -512,7 +525,7 @@ private static void checkRow(LogicalPlan p, Set<Failure> failures) { if (p instanceof Row row) { row.fields().forEach(a -> { if (DataType.isRepresentable(a.dataType()) == false) { - failures.add(fail(a, "cannot use [{}] directly in a row assignment", a.child().sourceText())); + failures.add(fail(a.child(), "cannot use [{}] directly in a row assignment", a.child().sourceText())); } }); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java index e3dff2032e6e8..8804a031de78c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java @@ -11,7 +11,6 @@ import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import java.util.ArrayList; import java.util.Collections; @@ -24,7 +23,6 @@ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { entries.add(MatchQueryPredicate.ENTRY); entries.add(MultiMatchQueryPredicate.ENTRY); - entries.add(StringQueryPredicate.ENTRY); entries.add(QueryString.ENTRY); entries.add(Match.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index 38153edccace7..3d6c35e914294 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -19,7 +19,6 @@ import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import 
org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; @@ -253,8 +252,6 @@ static boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof StringQueryPredicate) { - return true; } else if (exp instanceof Match mf) { return mf.field() instanceof FieldAttribute && DataType.isString(mf.field().dataType()); } else if (exp instanceof FullTextFunction) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 8b8bab2edbc41..c83fdbe8847a9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -28,7 +28,6 @@ null null null null -':' '|' null null @@ -38,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' @@ -159,7 +159,6 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -169,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -289,7 +289,6 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE DIGIT LETTER @@ -309,6 +308,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -339,7 +339,6 @@ MINUS ASTERISK SLASH PERCENT -EXPRESSION_COLON NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -499,4 +498,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 128, 1608, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 
104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 656, 8, 24, 11, 24, 12, 24, 657, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 666, 8, 25, 10, 25, 12, 25, 669, 9, 25, 1, 25, 3, 25, 672, 8, 25, 1, 25, 3, 25, 675, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 684, 8, 26, 10, 26, 12, 26, 687, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 695, 8, 27, 11, 27, 12, 27, 696, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 
29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 3, 34, 718, 8, 34, 1, 34, 4, 34, 721, 8, 34, 11, 34, 12, 34, 722, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 732, 8, 37, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 3, 39, 739, 8, 39, 1, 40, 1, 40, 1, 40, 5, 40, 744, 8, 40, 10, 40, 12, 40, 747, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 755, 8, 40, 10, 40, 12, 40, 758, 9, 40, 1, 40, 1, 40, 1, 40, 1, 40, 1, 40, 3, 40, 765, 8, 40, 1, 40, 3, 40, 768, 8, 40, 3, 40, 770, 8, 40, 1, 41, 4, 41, 773, 8, 41, 11, 41, 12, 41, 774, 1, 42, 4, 42, 778, 8, 42, 11, 42, 12, 42, 779, 1, 42, 1, 42, 5, 42, 784, 8, 42, 10, 42, 12, 42, 787, 9, 42, 1, 42, 1, 42, 4, 42, 791, 8, 42, 11, 42, 12, 42, 792, 1, 42, 4, 42, 796, 8, 42, 11, 42, 12, 42, 797, 1, 42, 1, 42, 5, 42, 802, 8, 42, 10, 42, 12, 42, 805, 9, 42, 3, 42, 807, 8, 42, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 813, 8, 42, 11, 42, 12, 42, 814, 1, 42, 1, 42, 3, 42, 819, 8, 42, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 3, 80, 950, 8, 80, 1, 80, 5, 80, 953, 8, 80, 10, 80, 12, 80, 956, 9, 80, 1, 80, 1, 80, 4, 80, 960, 8, 80, 11, 80, 12, 80, 961, 3, 80, 964, 8, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 5, 83, 978, 8, 83, 10, 83, 12, 83, 981, 9, 83, 1, 83, 1, 83, 3, 83, 985, 8, 83, 1, 83, 4, 83, 988, 8, 83, 11, 83, 12, 83, 989, 3, 83, 992, 8, 83, 1, 84, 1, 84, 4, 84, 996, 8, 84, 11, 84, 12, 84, 997, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 3, 101, 1075, 8, 101, 1, 102, 4, 102, 1078, 8, 102, 11, 102, 12, 102, 1079, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 3, 113, 1129, 8, 113, 1, 114, 1, 114, 3, 114, 1133, 8, 114, 1, 114, 5, 114, 1136, 8, 114, 10, 114, 12, 114, 1139, 9, 114, 1, 114, 1, 114, 3, 114, 1143, 8, 114, 1, 114, 4, 114, 1146, 8, 114, 11, 114, 12, 114, 1147, 3, 114, 1150, 8, 114, 1, 115, 1, 115, 4, 115, 1154, 8, 
115, 11, 115, 12, 115, 1155, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 135, 4, 135, 1241, 8, 135, 11, 135, 12, 135, 1242, 1, 135, 1, 135, 3, 135, 1247, 8, 135, 1, 135, 4, 135, 1250, 8, 135, 11, 135, 12, 135, 1251, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 168, 4, 168, 1397, 8, 168, 11, 168, 12, 168, 1398, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 
211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 2, 685, 756, 0, 214, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 30, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 0, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 0, 176, 69, 178, 70, 180, 71, 182, 72, 184, 0, 186, 73, 188, 74, 190, 75, 192, 76, 194, 0, 196, 0, 198, 77, 200, 78, 202, 79, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 80, 218, 0, 220, 81, 222, 0, 224, 0, 226, 82, 228, 83, 230, 84, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 85, 248, 86, 250, 87, 252, 88, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 89, 268, 0, 270, 90, 272, 91, 274, 92, 276, 0, 278, 0, 280, 93, 282, 94, 284, 0, 286, 95, 288, 0, 290, 96, 292, 97, 294, 98, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 314, 99, 316, 100, 318, 101, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 102, 334, 103, 336, 104, 338, 0, 340, 105, 342, 106, 344, 107, 346, 108, 348, 0, 350, 0, 352, 109, 354, 110, 356, 111, 358, 112, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 113, 376, 114, 378, 115, 380, 0, 382, 0, 384, 0, 386, 0, 388, 116, 390, 117, 392, 118, 394, 0, 396, 0, 398, 0, 400, 0, 402, 119, 404, 0, 406, 0, 408, 120, 410, 121, 412, 122, 414, 0, 416, 0, 418, 0, 420, 123, 422, 124, 424, 125, 426, 0, 428, 0, 430, 126, 432, 127, 434, 128, 436, 0, 438, 0, 440, 0, 442, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1635, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 
62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 0, 72, 1, 0, 0, 0, 1, 74, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 5, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 
0, 16, 444, 1, 0, 0, 0, 18, 454, 1, 0, 0, 0, 20, 461, 1, 0, 0, 0, 22, 470, 1, 0, 0, 0, 24, 477, 1, 0, 0, 0, 26, 487, 1, 0, 0, 0, 28, 494, 1, 0, 0, 0, 30, 501, 1, 0, 0, 0, 32, 508, 1, 0, 0, 0, 34, 516, 1, 0, 0, 0, 36, 528, 1, 0, 0, 0, 38, 537, 1, 0, 0, 0, 40, 543, 1, 0, 0, 0, 42, 550, 1, 0, 0, 0, 44, 557, 1, 0, 0, 0, 46, 565, 1, 0, 0, 0, 48, 573, 1, 0, 0, 0, 50, 588, 1, 0, 0, 0, 52, 600, 1, 0, 0, 0, 54, 611, 1, 0, 0, 0, 56, 619, 1, 0, 0, 0, 58, 627, 1, 0, 0, 0, 60, 635, 1, 0, 0, 0, 62, 644, 1, 0, 0, 0, 64, 655, 1, 0, 0, 0, 66, 661, 1, 0, 0, 0, 68, 678, 1, 0, 0, 0, 70, 694, 1, 0, 0, 0, 72, 700, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 706, 1, 0, 0, 0, 78, 708, 1, 0, 0, 0, 80, 710, 1, 0, 0, 0, 82, 713, 1, 0, 0, 0, 84, 715, 1, 0, 0, 0, 86, 724, 1, 0, 0, 0, 88, 726, 1, 0, 0, 0, 90, 731, 1, 0, 0, 0, 92, 733, 1, 0, 0, 0, 94, 738, 1, 0, 0, 0, 96, 769, 1, 0, 0, 0, 98, 772, 1, 0, 0, 0, 100, 818, 1, 0, 0, 0, 102, 820, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 831, 1, 0, 0, 0, 110, 833, 1, 0, 0, 0, 112, 836, 1, 0, 0, 0, 114, 838, 1, 0, 0, 0, 116, 843, 1, 0, 0, 0, 118, 845, 1, 0, 0, 0, 120, 851, 1, 0, 0, 0, 122, 857, 1, 0, 0, 0, 124, 860, 1, 0, 0, 0, 126, 863, 1, 0, 0, 0, 128, 868, 1, 0, 0, 0, 130, 873, 1, 0, 0, 0, 132, 875, 1, 0, 0, 0, 134, 879, 1, 0, 0, 0, 136, 884, 1, 0, 0, 0, 138, 890, 1, 0, 0, 0, 140, 893, 1, 0, 0, 0, 142, 895, 1, 0, 0, 0, 144, 901, 1, 0, 0, 0, 146, 903, 1, 0, 0, 0, 148, 908, 1, 0, 0, 0, 150, 911, 1, 0, 0, 0, 152, 914, 1, 0, 0, 0, 154, 917, 1, 0, 0, 0, 156, 919, 1, 0, 0, 0, 158, 922, 1, 0, 0, 0, 160, 924, 1, 0, 0, 0, 162, 927, 1, 0, 0, 0, 164, 929, 1, 0, 0, 0, 166, 931, 1, 0, 0, 0, 168, 933, 1, 0, 0, 0, 170, 935, 1, 0, 0, 0, 172, 937, 1, 0, 0, 0, 174, 942, 1, 0, 0, 0, 176, 963, 1, 0, 0, 0, 178, 965, 1, 0, 0, 0, 180, 970, 1, 0, 0, 0, 182, 991, 1, 0, 0, 0, 184, 993, 1, 0, 0, 0, 186, 1001, 1, 0, 0, 0, 188, 1003, 1, 0, 0, 0, 190, 1007, 1, 0, 0, 0, 192, 1011, 1, 0, 0, 0, 194, 1015, 1, 0, 0, 0, 196, 1020, 1, 0, 0, 0, 198, 1025, 1, 0, 0, 0, 200, 1029, 1, 0, 0, 0, 202, 1033, 1, 0, 0, 0, 204, 1037, 1, 0, 0, 0, 206, 1042, 1, 0, 0, 0, 208, 1046, 1, 0, 0, 0, 210, 1050, 1, 0, 0, 0, 212, 1054, 1, 0, 0, 0, 214, 1058, 1, 0, 0, 0, 216, 1062, 1, 0, 0, 0, 218, 1074, 1, 0, 0, 0, 220, 1077, 1, 0, 0, 0, 222, 1081, 1, 0, 0, 0, 224, 1085, 1, 0, 0, 0, 226, 1089, 1, 0, 0, 0, 228, 1093, 1, 0, 0, 0, 230, 1097, 1, 0, 0, 0, 232, 1101, 1, 0, 0, 0, 234, 1106, 1, 0, 0, 0, 236, 1110, 1, 0, 0, 0, 238, 1114, 1, 0, 0, 0, 240, 1119, 1, 0, 0, 0, 242, 1128, 1, 0, 0, 0, 244, 1149, 1, 0, 0, 0, 246, 1153, 1, 0, 0, 0, 248, 1157, 1, 0, 0, 0, 250, 1161, 1, 0, 0, 0, 252, 1165, 1, 0, 0, 0, 254, 1169, 1, 0, 0, 0, 256, 1174, 1, 0, 0, 0, 258, 1178, 1, 0, 0, 0, 260, 1182, 1, 0, 0, 0, 262, 1186, 1, 0, 0, 0, 264, 1191, 1, 0, 0, 0, 266, 1196, 1, 0, 0, 0, 268, 1199, 1, 0, 0, 0, 270, 1203, 1, 0, 0, 0, 272, 1207, 1, 0, 0, 0, 274, 1211, 1, 0, 0, 0, 276, 1215, 1, 0, 0, 0, 278, 1220, 1, 0, 0, 0, 280, 1225, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1237, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1253, 1, 0, 0, 0, 290, 1257, 1, 0, 0, 0, 292, 1261, 1, 0, 0, 0, 294, 1265, 1, 0, 0, 0, 296, 1269, 1, 0, 0, 0, 298, 1275, 1, 0, 0, 0, 300, 1279, 1, 0, 0, 0, 302, 1283, 1, 0, 0, 0, 304, 1287, 1, 0, 0, 0, 306, 1291, 1, 0, 0, 0, 308, 1295, 1, 0, 0, 0, 310, 1299, 1, 0, 0, 0, 312, 1304, 1, 0, 0, 0, 314, 1309, 1, 0, 0, 0, 316, 1313, 1, 0, 0, 0, 318, 1317, 1, 0, 0, 0, 320, 1321, 1, 0, 0, 0, 322, 1326, 1, 0, 0, 0, 324, 1330, 1, 0, 0, 0, 326, 1335, 1, 0, 0, 0, 328, 1340, 1, 0, 0, 0, 330, 1344, 1, 0, 0, 0, 332, 1348, 1, 0, 0, 0, 334, 1352, 1, 0, 0, 
0, 336, 1356, 1, 0, 0, 0, 338, 1360, 1, 0, 0, 0, 340, 1365, 1, 0, 0, 0, 342, 1370, 1, 0, 0, 0, 344, 1374, 1, 0, 0, 0, 346, 1378, 1, 0, 0, 0, 348, 1382, 1, 0, 0, 0, 350, 1387, 1, 0, 0, 0, 352, 1396, 1, 0, 0, 0, 354, 1400, 1, 0, 0, 0, 356, 1404, 1, 0, 0, 0, 358, 1408, 1, 0, 0, 0, 360, 1412, 1, 0, 0, 0, 362, 1417, 1, 0, 0, 0, 364, 1421, 1, 0, 0, 0, 366, 1425, 1, 0, 0, 0, 368, 1429, 1, 0, 0, 0, 370, 1434, 1, 0, 0, 0, 372, 1438, 1, 0, 0, 0, 374, 1442, 1, 0, 0, 0, 376, 1446, 1, 0, 0, 0, 378, 1450, 1, 0, 0, 0, 380, 1454, 1, 0, 0, 0, 382, 1460, 1, 0, 0, 0, 384, 1464, 1, 0, 0, 0, 386, 1468, 1, 0, 0, 0, 388, 1472, 1, 0, 0, 0, 390, 1476, 1, 0, 0, 0, 392, 1480, 1, 0, 0, 0, 394, 1484, 1, 0, 0, 0, 396, 1489, 1, 0, 0, 0, 398, 1493, 1, 0, 0, 0, 400, 1497, 1, 0, 0, 0, 402, 1503, 1, 0, 0, 0, 404, 1512, 1, 0, 0, 0, 406, 1516, 1, 0, 0, 0, 408, 1520, 1, 0, 0, 0, 410, 1524, 1, 0, 0, 0, 412, 1528, 1, 0, 0, 0, 414, 1532, 1, 0, 0, 0, 416, 1537, 1, 0, 0, 0, 418, 1543, 1, 0, 0, 0, 420, 1549, 1, 0, 0, 0, 422, 1553, 1, 0, 0, 0, 424, 1557, 1, 0, 0, 0, 426, 1561, 1, 0, 0, 0, 428, 1567, 1, 0, 0, 0, 430, 1573, 1, 0, 0, 0, 432, 1577, 1, 0, 0, 0, 434, 1581, 1, 0, 0, 0, 436, 1585, 1, 0, 0, 0, 438, 1591, 1, 0, 0, 0, 440, 1597, 1, 0, 0, 0, 442, 1603, 1, 0, 0, 0, 444, 445, 7, 0, 0, 0, 445, 446, 7, 1, 0, 0, 446, 447, 7, 2, 0, 0, 447, 448, 7, 2, 0, 0, 448, 449, 7, 3, 0, 0, 449, 450, 7, 4, 0, 0, 450, 451, 7, 5, 0, 0, 451, 452, 1, 0, 0, 0, 452, 453, 6, 0, 0, 0, 453, 17, 1, 0, 0, 0, 454, 455, 7, 0, 0, 0, 455, 456, 7, 6, 0, 0, 456, 457, 7, 7, 0, 0, 457, 458, 7, 8, 0, 0, 458, 459, 1, 0, 0, 0, 459, 460, 6, 1, 1, 0, 460, 19, 1, 0, 0, 0, 461, 462, 7, 3, 0, 0, 462, 463, 7, 9, 0, 0, 463, 464, 7, 6, 0, 0, 464, 465, 7, 1, 0, 0, 465, 466, 7, 4, 0, 0, 466, 467, 7, 10, 0, 0, 467, 468, 1, 0, 0, 0, 468, 469, 6, 2, 2, 0, 469, 21, 1, 0, 0, 0, 470, 471, 7, 3, 0, 0, 471, 472, 7, 11, 0, 0, 472, 473, 7, 12, 0, 0, 473, 474, 7, 13, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 6, 3, 0, 0, 476, 23, 1, 0, 0, 0, 477, 478, 7, 3, 0, 0, 478, 479, 7, 14, 0, 0, 479, 480, 7, 8, 0, 0, 480, 481, 7, 13, 0, 0, 481, 482, 7, 12, 0, 0, 482, 483, 7, 1, 0, 0, 483, 484, 7, 9, 0, 0, 484, 485, 1, 0, 0, 0, 485, 486, 6, 4, 3, 0, 486, 25, 1, 0, 0, 0, 487, 488, 7, 15, 0, 0, 488, 489, 7, 6, 0, 0, 489, 490, 7, 7, 0, 0, 490, 491, 7, 16, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 6, 5, 4, 0, 493, 27, 1, 0, 0, 0, 494, 495, 7, 17, 0, 0, 495, 496, 7, 6, 0, 0, 496, 497, 7, 7, 0, 0, 497, 498, 7, 18, 0, 0, 498, 499, 1, 0, 0, 0, 499, 500, 6, 6, 0, 0, 500, 29, 1, 0, 0, 0, 501, 502, 7, 18, 0, 0, 502, 503, 7, 3, 0, 0, 503, 504, 7, 3, 0, 0, 504, 505, 7, 8, 0, 0, 505, 506, 1, 0, 0, 0, 506, 507, 6, 7, 1, 0, 507, 31, 1, 0, 0, 0, 508, 509, 7, 13, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 16, 0, 0, 511, 512, 7, 1, 0, 0, 512, 513, 7, 5, 0, 0, 513, 514, 1, 0, 0, 0, 514, 515, 6, 8, 0, 0, 515, 33, 1, 0, 0, 0, 516, 517, 7, 16, 0, 0, 517, 518, 7, 11, 0, 0, 518, 519, 5, 95, 0, 0, 519, 520, 7, 3, 0, 0, 520, 521, 7, 14, 0, 0, 521, 522, 7, 8, 0, 0, 522, 523, 7, 12, 0, 0, 523, 524, 7, 9, 0, 0, 524, 525, 7, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 527, 6, 9, 5, 0, 527, 35, 1, 0, 0, 0, 528, 529, 7, 6, 0, 0, 529, 530, 7, 3, 0, 0, 530, 531, 7, 9, 0, 0, 531, 532, 7, 12, 0, 0, 532, 533, 7, 16, 0, 0, 533, 534, 7, 3, 0, 0, 534, 535, 1, 0, 0, 0, 535, 536, 6, 10, 6, 0, 536, 37, 1, 0, 0, 0, 537, 538, 7, 6, 0, 0, 538, 539, 7, 7, 0, 0, 539, 540, 7, 19, 0, 0, 540, 541, 1, 0, 0, 0, 541, 542, 6, 11, 0, 0, 542, 39, 1, 0, 0, 0, 543, 544, 7, 2, 0, 0, 544, 545, 7, 10, 0, 0, 545, 546, 7, 7, 0, 0, 546, 547, 7, 19, 0, 0, 547, 548, 1, 0, 0, 0, 548, 
549, 6, 12, 7, 0, 549, 41, 1, 0, 0, 0, 550, 551, 7, 2, 0, 0, 551, 552, 7, 7, 0, 0, 552, 553, 7, 6, 0, 0, 553, 554, 7, 5, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 6, 13, 0, 0, 556, 43, 1, 0, 0, 0, 557, 558, 7, 2, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 12, 0, 0, 560, 561, 7, 5, 0, 0, 561, 562, 7, 2, 0, 0, 562, 563, 1, 0, 0, 0, 563, 564, 6, 14, 0, 0, 564, 45, 1, 0, 0, 0, 565, 566, 7, 19, 0, 0, 566, 567, 7, 10, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 7, 6, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 6, 15, 0, 0, 572, 47, 1, 0, 0, 0, 573, 574, 4, 16, 0, 0, 574, 575, 7, 1, 0, 0, 575, 576, 7, 9, 0, 0, 576, 577, 7, 13, 0, 0, 577, 578, 7, 1, 0, 0, 578, 579, 7, 9, 0, 0, 579, 580, 7, 3, 0, 0, 580, 581, 7, 2, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 12, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 2, 0, 0, 585, 586, 1, 0, 0, 0, 586, 587, 6, 16, 0, 0, 587, 49, 1, 0, 0, 0, 588, 589, 4, 17, 1, 0, 589, 590, 7, 13, 0, 0, 590, 591, 7, 7, 0, 0, 591, 592, 7, 7, 0, 0, 592, 593, 7, 18, 0, 0, 593, 594, 7, 20, 0, 0, 594, 595, 7, 8, 0, 0, 595, 596, 5, 95, 0, 0, 596, 597, 5, 128020, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 6, 17, 8, 0, 599, 51, 1, 0, 0, 0, 600, 601, 4, 18, 2, 0, 601, 602, 7, 16, 0, 0, 602, 603, 7, 3, 0, 0, 603, 604, 7, 5, 0, 0, 604, 605, 7, 6, 0, 0, 605, 606, 7, 1, 0, 0, 606, 607, 7, 4, 0, 0, 607, 608, 7, 2, 0, 0, 608, 609, 1, 0, 0, 0, 609, 610, 6, 18, 9, 0, 610, 53, 1, 0, 0, 0, 611, 612, 4, 19, 3, 0, 612, 613, 7, 21, 0, 0, 613, 614, 7, 7, 0, 0, 614, 615, 7, 1, 0, 0, 615, 616, 7, 9, 0, 0, 616, 617, 1, 0, 0, 0, 617, 618, 6, 19, 10, 0, 618, 55, 1, 0, 0, 0, 619, 620, 4, 20, 4, 0, 620, 621, 7, 15, 0, 0, 621, 622, 7, 20, 0, 0, 622, 623, 7, 13, 0, 0, 623, 624, 7, 13, 0, 0, 624, 625, 1, 0, 0, 0, 625, 626, 6, 20, 10, 0, 626, 57, 1, 0, 0, 0, 627, 628, 4, 21, 5, 0, 628, 629, 7, 13, 0, 0, 629, 630, 7, 3, 0, 0, 630, 631, 7, 15, 0, 0, 631, 632, 7, 5, 0, 0, 632, 633, 1, 0, 0, 0, 633, 634, 6, 21, 10, 0, 634, 59, 1, 0, 0, 0, 635, 636, 4, 22, 6, 0, 636, 637, 7, 6, 0, 0, 637, 638, 7, 1, 0, 0, 638, 639, 7, 17, 0, 0, 639, 640, 7, 10, 0, 0, 640, 641, 7, 5, 0, 0, 641, 642, 1, 0, 0, 0, 642, 643, 6, 22, 10, 0, 643, 61, 1, 0, 0, 0, 644, 645, 4, 23, 7, 0, 645, 646, 7, 13, 0, 0, 646, 647, 7, 7, 0, 0, 647, 648, 7, 7, 0, 0, 648, 649, 7, 18, 0, 0, 649, 650, 7, 20, 0, 0, 650, 651, 7, 8, 0, 0, 651, 652, 1, 0, 0, 0, 652, 653, 6, 23, 10, 0, 653, 63, 1, 0, 0, 0, 654, 656, 8, 22, 0, 0, 655, 654, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 655, 1, 0, 0, 0, 657, 658, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 660, 6, 24, 0, 0, 660, 65, 1, 0, 0, 0, 661, 662, 5, 47, 0, 0, 662, 663, 5, 47, 0, 0, 663, 667, 1, 0, 0, 0, 664, 666, 8, 23, 0, 0, 665, 664, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 667, 668, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 670, 672, 5, 13, 0, 0, 671, 670, 1, 0, 0, 0, 671, 672, 1, 0, 0, 0, 672, 674, 1, 0, 0, 0, 673, 675, 5, 10, 0, 0, 674, 673, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 675, 676, 1, 0, 0, 0, 676, 677, 6, 25, 11, 0, 677, 67, 1, 0, 0, 0, 678, 679, 5, 47, 0, 0, 679, 680, 5, 42, 0, 0, 680, 685, 1, 0, 0, 0, 681, 684, 3, 68, 26, 0, 682, 684, 9, 0, 0, 0, 683, 681, 1, 0, 0, 0, 683, 682, 1, 0, 0, 0, 684, 687, 1, 0, 0, 0, 685, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 688, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 688, 689, 5, 42, 0, 0, 689, 690, 5, 47, 0, 0, 690, 691, 1, 0, 0, 0, 691, 692, 6, 26, 11, 0, 692, 69, 1, 0, 0, 0, 693, 695, 7, 24, 0, 0, 694, 693, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 694, 1, 0, 0, 0, 696, 697, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 6, 27, 11, 0, 699, 71, 1, 0, 0, 0, 700, 
701, 5, 58, 0, 0, 701, 73, 1, 0, 0, 0, 702, 703, 5, 124, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 6, 29, 12, 0, 705, 75, 1, 0, 0, 0, 706, 707, 7, 25, 0, 0, 707, 77, 1, 0, 0, 0, 708, 709, 7, 26, 0, 0, 709, 79, 1, 0, 0, 0, 710, 711, 5, 92, 0, 0, 711, 712, 7, 27, 0, 0, 712, 81, 1, 0, 0, 0, 713, 714, 8, 28, 0, 0, 714, 83, 1, 0, 0, 0, 715, 717, 7, 3, 0, 0, 716, 718, 7, 29, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 720, 1, 0, 0, 0, 719, 721, 3, 76, 30, 0, 720, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 85, 1, 0, 0, 0, 724, 725, 5, 64, 0, 0, 725, 87, 1, 0, 0, 0, 726, 727, 5, 96, 0, 0, 727, 89, 1, 0, 0, 0, 728, 732, 8, 30, 0, 0, 729, 730, 5, 96, 0, 0, 730, 732, 5, 96, 0, 0, 731, 728, 1, 0, 0, 0, 731, 729, 1, 0, 0, 0, 732, 91, 1, 0, 0, 0, 733, 734, 5, 95, 0, 0, 734, 93, 1, 0, 0, 0, 735, 739, 3, 78, 31, 0, 736, 739, 3, 76, 30, 0, 737, 739, 3, 92, 38, 0, 738, 735, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 737, 1, 0, 0, 0, 739, 95, 1, 0, 0, 0, 740, 745, 5, 34, 0, 0, 741, 744, 3, 80, 32, 0, 742, 744, 3, 82, 33, 0, 743, 741, 1, 0, 0, 0, 743, 742, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 748, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 748, 770, 5, 34, 0, 0, 749, 750, 5, 34, 0, 0, 750, 751, 5, 34, 0, 0, 751, 752, 5, 34, 0, 0, 752, 756, 1, 0, 0, 0, 753, 755, 8, 23, 0, 0, 754, 753, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 759, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 759, 760, 5, 34, 0, 0, 760, 761, 5, 34, 0, 0, 761, 762, 5, 34, 0, 0, 762, 764, 1, 0, 0, 0, 763, 765, 5, 34, 0, 0, 764, 763, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 767, 1, 0, 0, 0, 766, 768, 5, 34, 0, 0, 767, 766, 1, 0, 0, 0, 767, 768, 1, 0, 0, 0, 768, 770, 1, 0, 0, 0, 769, 740, 1, 0, 0, 0, 769, 749, 1, 0, 0, 0, 770, 97, 1, 0, 0, 0, 771, 773, 3, 76, 30, 0, 772, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 99, 1, 0, 0, 0, 776, 778, 3, 76, 30, 0, 777, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 785, 3, 116, 50, 0, 782, 784, 3, 76, 30, 0, 783, 782, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 819, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 790, 3, 116, 50, 0, 789, 791, 3, 76, 30, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 819, 1, 0, 0, 0, 794, 796, 3, 76, 30, 0, 795, 794, 1, 0, 0, 0, 796, 797, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 806, 1, 0, 0, 0, 799, 803, 3, 116, 50, 0, 800, 802, 3, 76, 30, 0, 801, 800, 1, 0, 0, 0, 802, 805, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 809, 3, 84, 34, 0, 809, 819, 1, 0, 0, 0, 810, 812, 3, 116, 50, 0, 811, 813, 3, 76, 30, 0, 812, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 3, 84, 34, 0, 817, 819, 1, 0, 0, 0, 818, 777, 1, 0, 0, 0, 818, 788, 1, 0, 0, 0, 818, 795, 1, 0, 0, 0, 818, 810, 1, 0, 0, 0, 819, 101, 1, 0, 0, 0, 820, 821, 7, 31, 0, 0, 821, 822, 7, 32, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 9, 0, 0, 825, 826, 7, 0, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 7, 12, 0, 0, 828, 829, 7, 2, 0, 0, 829, 830, 7, 4, 0, 0, 830, 107, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 109, 1, 0, 0, 0, 833, 834, 5, 58, 0, 0, 834, 835, 5, 58, 0, 0, 835, 111, 1, 0, 0, 0, 836, 837, 5, 44, 0, 
0, 837, 113, 1, 0, 0, 0, 838, 839, 7, 0, 0, 0, 839, 840, 7, 3, 0, 0, 840, 841, 7, 2, 0, 0, 841, 842, 7, 4, 0, 0, 842, 115, 1, 0, 0, 0, 843, 844, 5, 46, 0, 0, 844, 117, 1, 0, 0, 0, 845, 846, 7, 15, 0, 0, 846, 847, 7, 12, 0, 0, 847, 848, 7, 13, 0, 0, 848, 849, 7, 2, 0, 0, 849, 850, 7, 3, 0, 0, 850, 119, 1, 0, 0, 0, 851, 852, 7, 15, 0, 0, 852, 853, 7, 1, 0, 0, 853, 854, 7, 6, 0, 0, 854, 855, 7, 2, 0, 0, 855, 856, 7, 5, 0, 0, 856, 121, 1, 0, 0, 0, 857, 858, 7, 1, 0, 0, 858, 859, 7, 9, 0, 0, 859, 123, 1, 0, 0, 0, 860, 861, 7, 1, 0, 0, 861, 862, 7, 2, 0, 0, 862, 125, 1, 0, 0, 0, 863, 864, 7, 13, 0, 0, 864, 865, 7, 12, 0, 0, 865, 866, 7, 2, 0, 0, 866, 867, 7, 5, 0, 0, 867, 127, 1, 0, 0, 0, 868, 869, 7, 13, 0, 0, 869, 870, 7, 1, 0, 0, 870, 871, 7, 18, 0, 0, 871, 872, 7, 3, 0, 0, 872, 129, 1, 0, 0, 0, 873, 874, 5, 40, 0, 0, 874, 131, 1, 0, 0, 0, 875, 876, 7, 9, 0, 0, 876, 877, 7, 7, 0, 0, 877, 878, 7, 5, 0, 0, 878, 133, 1, 0, 0, 0, 879, 880, 7, 9, 0, 0, 880, 881, 7, 20, 0, 0, 881, 882, 7, 13, 0, 0, 882, 883, 7, 13, 0, 0, 883, 135, 1, 0, 0, 0, 884, 885, 7, 9, 0, 0, 885, 886, 7, 20, 0, 0, 886, 887, 7, 13, 0, 0, 887, 888, 7, 13, 0, 0, 888, 889, 7, 2, 0, 0, 889, 137, 1, 0, 0, 0, 890, 891, 7, 7, 0, 0, 891, 892, 7, 6, 0, 0, 892, 139, 1, 0, 0, 0, 893, 894, 5, 63, 0, 0, 894, 141, 1, 0, 0, 0, 895, 896, 7, 6, 0, 0, 896, 897, 7, 13, 0, 0, 897, 898, 7, 1, 0, 0, 898, 899, 7, 18, 0, 0, 899, 900, 7, 3, 0, 0, 900, 143, 1, 0, 0, 0, 901, 902, 5, 41, 0, 0, 902, 145, 1, 0, 0, 0, 903, 904, 7, 5, 0, 0, 904, 905, 7, 6, 0, 0, 905, 906, 7, 20, 0, 0, 906, 907, 7, 3, 0, 0, 907, 147, 1, 0, 0, 0, 908, 909, 5, 61, 0, 0, 909, 910, 5, 61, 0, 0, 910, 149, 1, 0, 0, 0, 911, 912, 5, 61, 0, 0, 912, 913, 5, 126, 0, 0, 913, 151, 1, 0, 0, 0, 914, 915, 5, 33, 0, 0, 915, 916, 5, 61, 0, 0, 916, 153, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 155, 1, 0, 0, 0, 919, 920, 5, 60, 0, 0, 920, 921, 5, 61, 0, 0, 921, 157, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 159, 1, 0, 0, 0, 924, 925, 5, 62, 0, 0, 925, 926, 5, 61, 0, 0, 926, 161, 1, 0, 0, 0, 927, 928, 5, 43, 0, 0, 928, 163, 1, 0, 0, 0, 929, 930, 5, 45, 0, 0, 930, 165, 1, 0, 0, 0, 931, 932, 5, 42, 0, 0, 932, 167, 1, 0, 0, 0, 933, 934, 5, 47, 0, 0, 934, 169, 1, 0, 0, 0, 935, 936, 5, 37, 0, 0, 936, 171, 1, 0, 0, 0, 937, 938, 4, 78, 8, 0, 938, 939, 3, 72, 28, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 78, 13, 0, 941, 173, 1, 0, 0, 0, 942, 943, 3, 46, 15, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 79, 14, 0, 945, 175, 1, 0, 0, 0, 946, 949, 3, 140, 62, 0, 947, 950, 3, 78, 31, 0, 948, 950, 3, 92, 38, 0, 949, 947, 1, 0, 0, 0, 949, 948, 1, 0, 0, 0, 950, 954, 1, 0, 0, 0, 951, 953, 3, 94, 39, 0, 952, 951, 1, 0, 0, 0, 953, 956, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 964, 1, 0, 0, 0, 956, 954, 1, 0, 0, 0, 957, 959, 3, 140, 62, 0, 958, 960, 3, 76, 30, 0, 959, 958, 1, 0, 0, 0, 960, 961, 1, 0, 0, 0, 961, 959, 1, 0, 0, 0, 961, 962, 1, 0, 0, 0, 962, 964, 1, 0, 0, 0, 963, 946, 1, 0, 0, 0, 963, 957, 1, 0, 0, 0, 964, 177, 1, 0, 0, 0, 965, 966, 5, 91, 0, 0, 966, 967, 1, 0, 0, 0, 967, 968, 6, 81, 0, 0, 968, 969, 6, 81, 0, 0, 969, 179, 1, 0, 0, 0, 970, 971, 5, 93, 0, 0, 971, 972, 1, 0, 0, 0, 972, 973, 6, 82, 12, 0, 973, 974, 6, 82, 12, 0, 974, 181, 1, 0, 0, 0, 975, 979, 3, 78, 31, 0, 976, 978, 3, 94, 39, 0, 977, 976, 1, 0, 0, 0, 978, 981, 1, 0, 0, 0, 979, 977, 1, 0, 0, 0, 979, 980, 1, 0, 0, 0, 980, 992, 1, 0, 0, 0, 981, 979, 1, 0, 0, 0, 982, 985, 3, 92, 38, 0, 983, 985, 3, 86, 35, 0, 984, 982, 1, 0, 0, 0, 984, 983, 1, 0, 0, 0, 985, 987, 1, 0, 0, 0, 986, 988, 3, 94, 39, 0, 987, 986, 1, 0, 0, 0, 988, 989, 1, 0, 
0, 0, 989, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 992, 1, 0, 0, 0, 991, 975, 1, 0, 0, 0, 991, 984, 1, 0, 0, 0, 992, 183, 1, 0, 0, 0, 993, 995, 3, 88, 36, 0, 994, 996, 3, 90, 37, 0, 995, 994, 1, 0, 0, 0, 996, 997, 1, 0, 0, 0, 997, 995, 1, 0, 0, 0, 997, 998, 1, 0, 0, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 3, 88, 36, 0, 1000, 185, 1, 0, 0, 0, 1001, 1002, 3, 184, 84, 0, 1002, 187, 1, 0, 0, 0, 1003, 1004, 3, 66, 25, 0, 1004, 1005, 1, 0, 0, 0, 1005, 1006, 6, 86, 11, 0, 1006, 189, 1, 0, 0, 0, 1007, 1008, 3, 68, 26, 0, 1008, 1009, 1, 0, 0, 0, 1009, 1010, 6, 87, 11, 0, 1010, 191, 1, 0, 0, 0, 1011, 1012, 3, 70, 27, 0, 1012, 1013, 1, 0, 0, 0, 1013, 1014, 6, 88, 11, 0, 1014, 193, 1, 0, 0, 0, 1015, 1016, 3, 178, 81, 0, 1016, 1017, 1, 0, 0, 0, 1017, 1018, 6, 89, 15, 0, 1018, 1019, 6, 89, 16, 0, 1019, 195, 1, 0, 0, 0, 1020, 1021, 3, 74, 29, 0, 1021, 1022, 1, 0, 0, 0, 1022, 1023, 6, 90, 17, 0, 1023, 1024, 6, 90, 12, 0, 1024, 197, 1, 0, 0, 0, 1025, 1026, 3, 70, 27, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 91, 11, 0, 1028, 199, 1, 0, 0, 0, 1029, 1030, 3, 66, 25, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 92, 11, 0, 1032, 201, 1, 0, 0, 0, 1033, 1034, 3, 68, 26, 0, 1034, 1035, 1, 0, 0, 0, 1035, 1036, 6, 93, 11, 0, 1036, 203, 1, 0, 0, 0, 1037, 1038, 3, 74, 29, 0, 1038, 1039, 1, 0, 0, 0, 1039, 1040, 6, 94, 17, 0, 1040, 1041, 6, 94, 12, 0, 1041, 205, 1, 0, 0, 0, 1042, 1043, 3, 178, 81, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 95, 15, 0, 1045, 207, 1, 0, 0, 0, 1046, 1047, 3, 180, 82, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 96, 18, 0, 1049, 209, 1, 0, 0, 0, 1050, 1051, 3, 72, 28, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 97, 13, 0, 1053, 211, 1, 0, 0, 0, 1054, 1055, 3, 112, 48, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 98, 19, 0, 1057, 213, 1, 0, 0, 0, 1058, 1059, 3, 108, 46, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 99, 20, 0, 1061, 215, 1, 0, 0, 0, 1062, 1063, 7, 16, 0, 0, 1063, 1064, 7, 3, 0, 0, 1064, 1065, 7, 5, 0, 0, 1065, 1066, 7, 12, 0, 0, 1066, 1067, 7, 0, 0, 0, 1067, 1068, 7, 12, 0, 0, 1068, 1069, 7, 5, 0, 0, 1069, 1070, 7, 12, 0, 0, 1070, 217, 1, 0, 0, 0, 1071, 1075, 8, 33, 0, 0, 1072, 1073, 5, 47, 0, 0, 1073, 1075, 8, 34, 0, 0, 1074, 1071, 1, 0, 0, 0, 1074, 1072, 1, 0, 0, 0, 1075, 219, 1, 0, 0, 0, 1076, 1078, 3, 218, 101, 0, 1077, 1076, 1, 0, 0, 0, 1078, 1079, 1, 0, 0, 0, 1079, 1077, 1, 0, 0, 0, 1079, 1080, 1, 0, 0, 0, 1080, 221, 1, 0, 0, 0, 1081, 1082, 3, 220, 102, 0, 1082, 1083, 1, 0, 0, 0, 1083, 1084, 6, 103, 21, 0, 1084, 223, 1, 0, 0, 0, 1085, 1086, 3, 96, 40, 0, 1086, 1087, 1, 0, 0, 0, 1087, 1088, 6, 104, 22, 0, 1088, 225, 1, 0, 0, 0, 1089, 1090, 3, 66, 25, 0, 1090, 1091, 1, 0, 0, 0, 1091, 1092, 6, 105, 11, 0, 1092, 227, 1, 0, 0, 0, 1093, 1094, 3, 68, 26, 0, 1094, 1095, 1, 0, 0, 0, 1095, 1096, 6, 106, 11, 0, 1096, 229, 1, 0, 0, 0, 1097, 1098, 3, 70, 27, 0, 1098, 1099, 1, 0, 0, 0, 1099, 1100, 6, 107, 11, 0, 1100, 231, 1, 0, 0, 0, 1101, 1102, 3, 74, 29, 0, 1102, 1103, 1, 0, 0, 0, 1103, 1104, 6, 108, 17, 0, 1104, 1105, 6, 108, 12, 0, 1105, 233, 1, 0, 0, 0, 1106, 1107, 3, 116, 50, 0, 1107, 1108, 1, 0, 0, 0, 1108, 1109, 6, 109, 23, 0, 1109, 235, 1, 0, 0, 0, 1110, 1111, 3, 112, 48, 0, 1111, 1112, 1, 0, 0, 0, 1112, 1113, 6, 110, 19, 0, 1113, 237, 1, 0, 0, 0, 1114, 1115, 4, 111, 9, 0, 1115, 1116, 3, 140, 62, 0, 1116, 1117, 1, 0, 0, 0, 1117, 1118, 6, 111, 24, 0, 1118, 239, 1, 0, 0, 0, 1119, 1120, 4, 112, 10, 0, 1120, 1121, 3, 176, 80, 0, 1121, 1122, 1, 0, 0, 0, 1122, 1123, 6, 112, 25, 0, 1123, 241, 1, 0, 0, 0, 1124, 1129, 3, 78, 31, 0, 1125, 1129, 3, 76, 30, 0, 1126, 1129, 3, 92, 38, 0, 1127, 1129, 3, 
166, 75, 0, 1128, 1124, 1, 0, 0, 0, 1128, 1125, 1, 0, 0, 0, 1128, 1126, 1, 0, 0, 0, 1128, 1127, 1, 0, 0, 0, 1129, 243, 1, 0, 0, 0, 1130, 1133, 3, 78, 31, 0, 1131, 1133, 3, 166, 75, 0, 1132, 1130, 1, 0, 0, 0, 1132, 1131, 1, 0, 0, 0, 1133, 1137, 1, 0, 0, 0, 1134, 1136, 3, 242, 113, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1139, 1, 0, 0, 0, 1137, 1135, 1, 0, 0, 0, 1137, 1138, 1, 0, 0, 0, 1138, 1150, 1, 0, 0, 0, 1139, 1137, 1, 0, 0, 0, 1140, 1143, 3, 92, 38, 0, 1141, 1143, 3, 86, 35, 0, 1142, 1140, 1, 0, 0, 0, 1142, 1141, 1, 0, 0, 0, 1143, 1145, 1, 0, 0, 0, 1144, 1146, 3, 242, 113, 0, 1145, 1144, 1, 0, 0, 0, 1146, 1147, 1, 0, 0, 0, 1147, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1150, 1, 0, 0, 0, 1149, 1132, 1, 0, 0, 0, 1149, 1142, 1, 0, 0, 0, 1150, 245, 1, 0, 0, 0, 1151, 1154, 3, 244, 114, 0, 1152, 1154, 3, 184, 84, 0, 1153, 1151, 1, 0, 0, 0, 1153, 1152, 1, 0, 0, 0, 1154, 1155, 1, 0, 0, 0, 1155, 1153, 1, 0, 0, 0, 1155, 1156, 1, 0, 0, 0, 1156, 247, 1, 0, 0, 0, 1157, 1158, 3, 66, 25, 0, 1158, 1159, 1, 0, 0, 0, 1159, 1160, 6, 116, 11, 0, 1160, 249, 1, 0, 0, 0, 1161, 1162, 3, 68, 26, 0, 1162, 1163, 1, 0, 0, 0, 1163, 1164, 6, 117, 11, 0, 1164, 251, 1, 0, 0, 0, 1165, 1166, 3, 70, 27, 0, 1166, 1167, 1, 0, 0, 0, 1167, 1168, 6, 118, 11, 0, 1168, 253, 1, 0, 0, 0, 1169, 1170, 3, 74, 29, 0, 1170, 1171, 1, 0, 0, 0, 1171, 1172, 6, 119, 17, 0, 1172, 1173, 6, 119, 12, 0, 1173, 255, 1, 0, 0, 0, 1174, 1175, 3, 108, 46, 0, 1175, 1176, 1, 0, 0, 0, 1176, 1177, 6, 120, 20, 0, 1177, 257, 1, 0, 0, 0, 1178, 1179, 3, 112, 48, 0, 1179, 1180, 1, 0, 0, 0, 1180, 1181, 6, 121, 19, 0, 1181, 259, 1, 0, 0, 0, 1182, 1183, 3, 116, 50, 0, 1183, 1184, 1, 0, 0, 0, 1184, 1185, 6, 122, 23, 0, 1185, 261, 1, 0, 0, 0, 1186, 1187, 4, 123, 11, 0, 1187, 1188, 3, 140, 62, 0, 1188, 1189, 1, 0, 0, 0, 1189, 1190, 6, 123, 24, 0, 1190, 263, 1, 0, 0, 0, 1191, 1192, 4, 124, 12, 0, 1192, 1193, 3, 176, 80, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 124, 25, 0, 1195, 265, 1, 0, 0, 0, 1196, 1197, 7, 12, 0, 0, 1197, 1198, 7, 2, 0, 0, 1198, 267, 1, 0, 0, 0, 1199, 1200, 3, 246, 115, 0, 1200, 1201, 1, 0, 0, 0, 1201, 1202, 6, 126, 26, 0, 1202, 269, 1, 0, 0, 0, 1203, 1204, 3, 66, 25, 0, 1204, 1205, 1, 0, 0, 0, 1205, 1206, 6, 127, 11, 0, 1206, 271, 1, 0, 0, 0, 1207, 1208, 3, 68, 26, 0, 1208, 1209, 1, 0, 0, 0, 1209, 1210, 6, 128, 11, 0, 1210, 273, 1, 0, 0, 0, 1211, 1212, 3, 70, 27, 0, 1212, 1213, 1, 0, 0, 0, 1213, 1214, 6, 129, 11, 0, 1214, 275, 1, 0, 0, 0, 1215, 1216, 3, 74, 29, 0, 1216, 1217, 1, 0, 0, 0, 1217, 1218, 6, 130, 17, 0, 1218, 1219, 6, 130, 12, 0, 1219, 277, 1, 0, 0, 0, 1220, 1221, 3, 178, 81, 0, 1221, 1222, 1, 0, 0, 0, 1222, 1223, 6, 131, 15, 0, 1223, 1224, 6, 131, 27, 0, 1224, 279, 1, 0, 0, 0, 1225, 1226, 7, 7, 0, 0, 1226, 1227, 7, 9, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 7, 19, 0, 0, 1231, 1232, 7, 1, 0, 0, 1232, 1233, 7, 5, 0, 0, 1233, 1234, 7, 10, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1236, 6, 133, 28, 0, 1236, 283, 1, 0, 0, 0, 1237, 1238, 8, 35, 0, 0, 1238, 285, 1, 0, 0, 0, 1239, 1241, 3, 284, 134, 0, 1240, 1239, 1, 0, 0, 0, 1241, 1242, 1, 0, 0, 0, 1242, 1240, 1, 0, 0, 0, 1242, 1243, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1245, 3, 72, 28, 0, 1245, 1247, 1, 0, 0, 0, 1246, 1240, 1, 0, 0, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1249, 1, 0, 0, 0, 1248, 1250, 3, 284, 134, 0, 1249, 1248, 1, 0, 0, 0, 1250, 1251, 1, 0, 0, 0, 1251, 1249, 1, 0, 0, 0, 1251, 1252, 1, 0, 0, 0, 1252, 287, 1, 0, 0, 0, 1253, 1254, 3, 286, 135, 0, 1254, 1255, 1, 0, 0, 0, 1255, 1256, 6, 136, 29, 0, 1256, 289, 1, 0, 0, 0, 1257, 
1258, 3, 66, 25, 0, 1258, 1259, 1, 0, 0, 0, 1259, 1260, 6, 137, 11, 0, 1260, 291, 1, 0, 0, 0, 1261, 1262, 3, 68, 26, 0, 1262, 1263, 1, 0, 0, 0, 1263, 1264, 6, 138, 11, 0, 1264, 293, 1, 0, 0, 0, 1265, 1266, 3, 70, 27, 0, 1266, 1267, 1, 0, 0, 0, 1267, 1268, 6, 139, 11, 0, 1268, 295, 1, 0, 0, 0, 1269, 1270, 3, 74, 29, 0, 1270, 1271, 1, 0, 0, 0, 1271, 1272, 6, 140, 17, 0, 1272, 1273, 6, 140, 12, 0, 1273, 1274, 6, 140, 12, 0, 1274, 297, 1, 0, 0, 0, 1275, 1276, 3, 108, 46, 0, 1276, 1277, 1, 0, 0, 0, 1277, 1278, 6, 141, 20, 0, 1278, 299, 1, 0, 0, 0, 1279, 1280, 3, 112, 48, 0, 1280, 1281, 1, 0, 0, 0, 1281, 1282, 6, 142, 19, 0, 1282, 301, 1, 0, 0, 0, 1283, 1284, 3, 116, 50, 0, 1284, 1285, 1, 0, 0, 0, 1285, 1286, 6, 143, 23, 0, 1286, 303, 1, 0, 0, 0, 1287, 1288, 3, 282, 133, 0, 1288, 1289, 1, 0, 0, 0, 1289, 1290, 6, 144, 30, 0, 1290, 305, 1, 0, 0, 0, 1291, 1292, 3, 246, 115, 0, 1292, 1293, 1, 0, 0, 0, 1293, 1294, 6, 145, 26, 0, 1294, 307, 1, 0, 0, 0, 1295, 1296, 3, 186, 85, 0, 1296, 1297, 1, 0, 0, 0, 1297, 1298, 6, 146, 31, 0, 1298, 309, 1, 0, 0, 0, 1299, 1300, 4, 147, 13, 0, 1300, 1301, 3, 140, 62, 0, 1301, 1302, 1, 0, 0, 0, 1302, 1303, 6, 147, 24, 0, 1303, 311, 1, 0, 0, 0, 1304, 1305, 4, 148, 14, 0, 1305, 1306, 3, 176, 80, 0, 1306, 1307, 1, 0, 0, 0, 1307, 1308, 6, 148, 25, 0, 1308, 313, 1, 0, 0, 0, 1309, 1310, 3, 66, 25, 0, 1310, 1311, 1, 0, 0, 0, 1311, 1312, 6, 149, 11, 0, 1312, 315, 1, 0, 0, 0, 1313, 1314, 3, 68, 26, 0, 1314, 1315, 1, 0, 0, 0, 1315, 1316, 6, 150, 11, 0, 1316, 317, 1, 0, 0, 0, 1317, 1318, 3, 70, 27, 0, 1318, 1319, 1, 0, 0, 0, 1319, 1320, 6, 151, 11, 0, 1320, 319, 1, 0, 0, 0, 1321, 1322, 3, 74, 29, 0, 1322, 1323, 1, 0, 0, 0, 1323, 1324, 6, 152, 17, 0, 1324, 1325, 6, 152, 12, 0, 1325, 321, 1, 0, 0, 0, 1326, 1327, 3, 116, 50, 0, 1327, 1328, 1, 0, 0, 0, 1328, 1329, 6, 153, 23, 0, 1329, 323, 1, 0, 0, 0, 1330, 1331, 4, 154, 15, 0, 1331, 1332, 3, 140, 62, 0, 1332, 1333, 1, 0, 0, 0, 1333, 1334, 6, 154, 24, 0, 1334, 325, 1, 0, 0, 0, 1335, 1336, 4, 155, 16, 0, 1336, 1337, 3, 176, 80, 0, 1337, 1338, 1, 0, 0, 0, 1338, 1339, 6, 155, 25, 0, 1339, 327, 1, 0, 0, 0, 1340, 1341, 3, 186, 85, 0, 1341, 1342, 1, 0, 0, 0, 1342, 1343, 6, 156, 31, 0, 1343, 329, 1, 0, 0, 0, 1344, 1345, 3, 182, 83, 0, 1345, 1346, 1, 0, 0, 0, 1346, 1347, 6, 157, 32, 0, 1347, 331, 1, 0, 0, 0, 1348, 1349, 3, 66, 25, 0, 1349, 1350, 1, 0, 0, 0, 1350, 1351, 6, 158, 11, 0, 1351, 333, 1, 0, 0, 0, 1352, 1353, 3, 68, 26, 0, 1353, 1354, 1, 0, 0, 0, 1354, 1355, 6, 159, 11, 0, 1355, 335, 1, 0, 0, 0, 1356, 1357, 3, 70, 27, 0, 1357, 1358, 1, 0, 0, 0, 1358, 1359, 6, 160, 11, 0, 1359, 337, 1, 0, 0, 0, 1360, 1361, 3, 74, 29, 0, 1361, 1362, 1, 0, 0, 0, 1362, 1363, 6, 161, 17, 0, 1363, 1364, 6, 161, 12, 0, 1364, 339, 1, 0, 0, 0, 1365, 1366, 7, 1, 0, 0, 1366, 1367, 7, 9, 0, 0, 1367, 1368, 7, 15, 0, 0, 1368, 1369, 7, 7, 0, 0, 1369, 341, 1, 0, 0, 0, 1370, 1371, 3, 66, 25, 0, 1371, 1372, 1, 0, 0, 0, 1372, 1373, 6, 163, 11, 0, 1373, 343, 1, 0, 0, 0, 1374, 1375, 3, 68, 26, 0, 1375, 1376, 1, 0, 0, 0, 1376, 1377, 6, 164, 11, 0, 1377, 345, 1, 0, 0, 0, 1378, 1379, 3, 70, 27, 0, 1379, 1380, 1, 0, 0, 0, 1380, 1381, 6, 165, 11, 0, 1381, 347, 1, 0, 0, 0, 1382, 1383, 3, 180, 82, 0, 1383, 1384, 1, 0, 0, 0, 1384, 1385, 6, 166, 18, 0, 1385, 1386, 6, 166, 12, 0, 1386, 349, 1, 0, 0, 0, 1387, 1388, 3, 72, 28, 0, 1388, 1389, 1, 0, 0, 0, 1389, 1390, 6, 167, 13, 0, 1390, 351, 1, 0, 0, 0, 1391, 1397, 3, 86, 35, 0, 1392, 1397, 3, 76, 30, 0, 1393, 1397, 3, 116, 50, 0, 1394, 1397, 3, 78, 31, 0, 1395, 1397, 3, 92, 38, 0, 1396, 1391, 1, 0, 0, 0, 1396, 1392, 1, 0, 0, 0, 
1396, 1393, 1, 0, 0, 0, 1396, 1394, 1, 0, 0, 0, 1396, 1395, 1, 0, 0, 0, 1397, 1398, 1, 0, 0, 0, 1398, 1396, 1, 0, 0, 0, 1398, 1399, 1, 0, 0, 0, 1399, 353, 1, 0, 0, 0, 1400, 1401, 3, 66, 25, 0, 1401, 1402, 1, 0, 0, 0, 1402, 1403, 6, 169, 11, 0, 1403, 355, 1, 0, 0, 0, 1404, 1405, 3, 68, 26, 0, 1405, 1406, 1, 0, 0, 0, 1406, 1407, 6, 170, 11, 0, 1407, 357, 1, 0, 0, 0, 1408, 1409, 3, 70, 27, 0, 1409, 1410, 1, 0, 0, 0, 1410, 1411, 6, 171, 11, 0, 1411, 359, 1, 0, 0, 0, 1412, 1413, 3, 74, 29, 0, 1413, 1414, 1, 0, 0, 0, 1414, 1415, 6, 172, 17, 0, 1415, 1416, 6, 172, 12, 0, 1416, 361, 1, 0, 0, 0, 1417, 1418, 3, 72, 28, 0, 1418, 1419, 1, 0, 0, 0, 1419, 1420, 6, 173, 13, 0, 1420, 363, 1, 0, 0, 0, 1421, 1422, 3, 112, 48, 0, 1422, 1423, 1, 0, 0, 0, 1423, 1424, 6, 174, 19, 0, 1424, 365, 1, 0, 0, 0, 1425, 1426, 3, 116, 50, 0, 1426, 1427, 1, 0, 0, 0, 1427, 1428, 6, 175, 23, 0, 1428, 367, 1, 0, 0, 0, 1429, 1430, 3, 280, 132, 0, 1430, 1431, 1, 0, 0, 0, 1431, 1432, 6, 176, 33, 0, 1432, 1433, 6, 176, 34, 0, 1433, 369, 1, 0, 0, 0, 1434, 1435, 3, 220, 102, 0, 1435, 1436, 1, 0, 0, 0, 1436, 1437, 6, 177, 21, 0, 1437, 371, 1, 0, 0, 0, 1438, 1439, 3, 96, 40, 0, 1439, 1440, 1, 0, 0, 0, 1440, 1441, 6, 178, 22, 0, 1441, 373, 1, 0, 0, 0, 1442, 1443, 3, 66, 25, 0, 1443, 1444, 1, 0, 0, 0, 1444, 1445, 6, 179, 11, 0, 1445, 375, 1, 0, 0, 0, 1446, 1447, 3, 68, 26, 0, 1447, 1448, 1, 0, 0, 0, 1448, 1449, 6, 180, 11, 0, 1449, 377, 1, 0, 0, 0, 1450, 1451, 3, 70, 27, 0, 1451, 1452, 1, 0, 0, 0, 1452, 1453, 6, 181, 11, 0, 1453, 379, 1, 0, 0, 0, 1454, 1455, 3, 74, 29, 0, 1455, 1456, 1, 0, 0, 0, 1456, 1457, 6, 182, 17, 0, 1457, 1458, 6, 182, 12, 0, 1458, 1459, 6, 182, 12, 0, 1459, 381, 1, 0, 0, 0, 1460, 1461, 3, 112, 48, 0, 1461, 1462, 1, 0, 0, 0, 1462, 1463, 6, 183, 19, 0, 1463, 383, 1, 0, 0, 0, 1464, 1465, 3, 116, 50, 0, 1465, 1466, 1, 0, 0, 0, 1466, 1467, 6, 184, 23, 0, 1467, 385, 1, 0, 0, 0, 1468, 1469, 3, 246, 115, 0, 1469, 1470, 1, 0, 0, 0, 1470, 1471, 6, 185, 26, 0, 1471, 387, 1, 0, 0, 0, 1472, 1473, 3, 66, 25, 0, 1473, 1474, 1, 0, 0, 0, 1474, 1475, 6, 186, 11, 0, 1475, 389, 1, 0, 0, 0, 1476, 1477, 3, 68, 26, 0, 1477, 1478, 1, 0, 0, 0, 1478, 1479, 6, 187, 11, 0, 1479, 391, 1, 0, 0, 0, 1480, 1481, 3, 70, 27, 0, 1481, 1482, 1, 0, 0, 0, 1482, 1483, 6, 188, 11, 0, 1483, 393, 1, 0, 0, 0, 1484, 1485, 3, 74, 29, 0, 1485, 1486, 1, 0, 0, 0, 1486, 1487, 6, 189, 17, 0, 1487, 1488, 6, 189, 12, 0, 1488, 395, 1, 0, 0, 0, 1489, 1490, 3, 54, 19, 0, 1490, 1491, 1, 0, 0, 0, 1491, 1492, 6, 190, 35, 0, 1492, 397, 1, 0, 0, 0, 1493, 1494, 3, 266, 125, 0, 1494, 1495, 1, 0, 0, 0, 1495, 1496, 6, 191, 36, 0, 1496, 399, 1, 0, 0, 0, 1497, 1498, 3, 280, 132, 0, 1498, 1499, 1, 0, 0, 0, 1499, 1500, 6, 192, 33, 0, 1500, 1501, 6, 192, 12, 0, 1501, 1502, 6, 192, 0, 0, 1502, 401, 1, 0, 0, 0, 1503, 1504, 7, 20, 0, 0, 1504, 1505, 7, 2, 0, 0, 1505, 1506, 7, 1, 0, 0, 1506, 1507, 7, 9, 0, 0, 1507, 1508, 7, 17, 0, 0, 1508, 1509, 1, 0, 0, 0, 1509, 1510, 6, 193, 12, 0, 1510, 1511, 6, 193, 0, 0, 1511, 403, 1, 0, 0, 0, 1512, 1513, 3, 182, 83, 0, 1513, 1514, 1, 0, 0, 0, 1514, 1515, 6, 194, 32, 0, 1515, 405, 1, 0, 0, 0, 1516, 1517, 3, 186, 85, 0, 1517, 1518, 1, 0, 0, 0, 1518, 1519, 6, 195, 31, 0, 1519, 407, 1, 0, 0, 0, 1520, 1521, 3, 66, 25, 0, 1521, 1522, 1, 0, 0, 0, 1522, 1523, 6, 196, 11, 0, 1523, 409, 1, 0, 0, 0, 1524, 1525, 3, 68, 26, 0, 1525, 1526, 1, 0, 0, 0, 1526, 1527, 6, 197, 11, 0, 1527, 411, 1, 0, 0, 0, 1528, 1529, 3, 70, 27, 0, 1529, 1530, 1, 0, 0, 0, 1530, 1531, 6, 198, 11, 0, 1531, 413, 1, 0, 0, 0, 1532, 1533, 3, 74, 29, 0, 1533, 1534, 1, 0, 0, 0, 1534, 1535, 
6, 199, 17, 0, 1535, 1536, 6, 199, 12, 0, 1536, 415, 1, 0, 0, 0, 1537, 1538, 3, 220, 102, 0, 1538, 1539, 1, 0, 0, 0, 1539, 1540, 6, 200, 21, 0, 1540, 1541, 6, 200, 12, 0, 1541, 1542, 6, 200, 37, 0, 1542, 417, 1, 0, 0, 0, 1543, 1544, 3, 96, 40, 0, 1544, 1545, 1, 0, 0, 0, 1545, 1546, 6, 201, 22, 0, 1546, 1547, 6, 201, 12, 0, 1547, 1548, 6, 201, 37, 0, 1548, 419, 1, 0, 0, 0, 1549, 1550, 3, 66, 25, 0, 1550, 1551, 1, 0, 0, 0, 1551, 1552, 6, 202, 11, 0, 1552, 421, 1, 0, 0, 0, 1553, 1554, 3, 68, 26, 0, 1554, 1555, 1, 0, 0, 0, 1555, 1556, 6, 203, 11, 0, 1556, 423, 1, 0, 0, 0, 1557, 1558, 3, 70, 27, 0, 1558, 1559, 1, 0, 0, 0, 1559, 1560, 6, 204, 11, 0, 1560, 425, 1, 0, 0, 0, 1561, 1562, 3, 72, 28, 0, 1562, 1563, 1, 0, 0, 0, 1563, 1564, 6, 205, 13, 0, 1564, 1565, 6, 205, 12, 0, 1565, 1566, 6, 205, 9, 0, 1566, 427, 1, 0, 0, 0, 1567, 1568, 3, 112, 48, 0, 1568, 1569, 1, 0, 0, 0, 1569, 1570, 6, 206, 19, 0, 1570, 1571, 6, 206, 12, 0, 1571, 1572, 6, 206, 9, 0, 1572, 429, 1, 0, 0, 0, 1573, 1574, 3, 66, 25, 0, 1574, 1575, 1, 0, 0, 0, 1575, 1576, 6, 207, 11, 0, 1576, 431, 1, 0, 0, 0, 1577, 1578, 3, 68, 26, 0, 1578, 1579, 1, 0, 0, 0, 1579, 1580, 6, 208, 11, 0, 1580, 433, 1, 0, 0, 0, 1581, 1582, 3, 70, 27, 0, 1582, 1583, 1, 0, 0, 0, 1583, 1584, 6, 209, 11, 0, 1584, 435, 1, 0, 0, 0, 1585, 1586, 3, 186, 85, 0, 1586, 1587, 1, 0, 0, 0, 1587, 1588, 6, 210, 12, 0, 1588, 1589, 6, 210, 0, 0, 1589, 1590, 6, 210, 31, 0, 1590, 437, 1, 0, 0, 0, 1591, 1592, 3, 182, 83, 0, 1592, 1593, 1, 0, 0, 0, 1593, 1594, 6, 211, 12, 0, 1594, 1595, 6, 211, 0, 0, 1595, 1596, 6, 211, 32, 0, 1596, 439, 1, 0, 0, 0, 1597, 1598, 3, 102, 43, 0, 1598, 1599, 1, 0, 0, 0, 1599, 1600, 6, 212, 12, 0, 1600, 1601, 6, 212, 0, 0, 1601, 1602, 6, 212, 38, 0, 1602, 441, 1, 0, 0, 0, 1603, 1604, 3, 74, 29, 0, 1604, 1605, 1, 0, 0, 0, 1605, 1606, 6, 213, 17, 0, 1606, 1607, 6, 213, 12, 0, 1607, 443, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 657, 667, 671, 674, 683, 685, 696, 717, 722, 731, 738, 743, 745, 756, 764, 767, 769, 774, 779, 785, 792, 797, 803, 806, 814, 818, 949, 954, 961, 963, 979, 984, 989, 991, 997, 1074, 1079, 1128, 1132, 1137, 1142, 1147, 1149, 1153, 1155, 1242, 1246, 1251, 1396, 1398, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 29, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 30, 0, 7, 71, 0, 7, 39, 0, 7, 37, 0, 7, 81, 0, 7, 31, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 34, 0] \ No newline at end of file +[4, 0, 128, 1601, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 
2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 
20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 654, 8, 24, 11, 24, 12, 24, 655, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 664, 8, 25, 10, 25, 12, 25, 667, 9, 25, 1, 25, 3, 25, 670, 8, 25, 1, 25, 3, 25, 673, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 682, 8, 26, 10, 26, 12, 26, 685, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 693, 8, 27, 11, 27, 12, 27, 694, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 714, 8, 33, 1, 33, 4, 33, 717, 8, 33, 11, 33, 12, 33, 718, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 728, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 735, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 740, 8, 39, 10, 39, 12, 39, 743, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 751, 8, 39, 10, 39, 12, 39, 754, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 761, 8, 39, 1, 39, 3, 39, 764, 8, 39, 3, 39, 766, 8, 39, 1, 40, 4, 40, 769, 8, 40, 11, 40, 12, 40, 770, 1, 41, 4, 41, 774, 8, 41, 11, 41, 12, 41, 775, 1, 41, 1, 41, 5, 41, 780, 8, 41, 10, 41, 12, 41, 783, 9, 41, 1, 41, 1, 41, 4, 41, 787, 8, 41, 11, 41, 12, 41, 788, 1, 41, 4, 41, 792, 8, 41, 11, 41, 12, 41, 793, 1, 41, 1, 41, 5, 41, 798, 8, 41, 10, 41, 12, 41, 801, 9, 41, 3, 41, 803, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 809, 8, 41, 11, 41, 12, 41, 810, 1, 41, 1, 41, 3, 41, 815, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 3, 79, 943, 8, 79, 1, 79, 5, 79, 946, 8, 79, 10, 79, 12, 79, 949, 9, 79, 1, 79, 1, 79, 4, 79, 953, 8, 79, 11, 79, 12, 79, 954, 3, 79, 957, 8, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 5, 82, 971, 8, 82, 10, 82, 12, 82, 974, 9, 82, 1, 82, 1, 82, 3, 82, 978, 8, 82, 1, 82, 4, 82, 981, 8, 82, 11, 82, 12, 82, 982, 3, 82, 985, 8, 82, 1, 83, 1, 83, 4, 83, 989, 8, 83, 11, 83, 12, 83, 990, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 3, 100, 1068, 8, 100, 1, 101, 4, 101, 1071, 8, 101, 11, 101, 12, 101, 1072, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 
1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 1122, 8, 112, 1, 113, 1, 113, 3, 113, 1126, 8, 113, 1, 113, 5, 113, 1129, 8, 113, 10, 113, 12, 113, 1132, 9, 113, 1, 113, 1, 113, 3, 113, 1136, 8, 113, 1, 113, 4, 113, 1139, 8, 113, 11, 113, 12, 113, 1140, 3, 113, 1143, 8, 113, 1, 114, 1, 114, 4, 114, 1147, 8, 114, 11, 114, 12, 114, 1148, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 4, 134, 1234, 8, 134, 11, 134, 12, 134, 1235, 1, 134, 1, 134, 3, 134, 1240, 8, 134, 1, 134, 4, 134, 1243, 8, 134, 11, 134, 12, 134, 1244, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 4, 167, 1390, 8, 167, 11, 167, 12, 167, 1391, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 
1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 2, 683, 752, 0, 213, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 69, 176, 70, 178, 71, 180, 72, 182, 0, 184, 73, 186, 74, 188, 75, 190, 76, 192, 0, 194, 0, 196, 77, 198, 78, 200, 79, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 80, 216, 0, 218, 81, 220, 0, 222, 0, 224, 82, 226, 83, 228, 84, 230, 0, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 88, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 89, 266, 0, 268, 90, 270, 91, 272, 92, 274, 0, 276, 0, 278, 93, 280, 94, 282, 0, 284, 95, 286, 0, 288, 96, 290, 97, 292, 98, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 99, 314, 100, 316, 101, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 102, 332, 103, 334, 104, 336, 0, 338, 105, 340, 106, 342, 107, 344, 108, 346, 0, 348, 0, 350, 109, 352, 110, 354, 111, 356, 112, 358, 0, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 113, 374, 114, 376, 115, 378, 0, 380, 0, 382, 0, 384, 0, 386, 116, 388, 117, 390, 118, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 0, 404, 0, 406, 120, 408, 121, 410, 122, 412, 0, 414, 0, 416, 0, 418, 123, 420, 124, 422, 125, 424, 0, 426, 0, 428, 126, 430, 127, 432, 128, 434, 0, 436, 0, 438, 0, 440, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 
2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1628, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 244, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 8, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 9, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 10, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 11, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 12, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 
12, 390, 1, 0, 0, 0, 13, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 14, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 15, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 16, 442, 1, 0, 0, 0, 18, 452, 1, 0, 0, 0, 20, 459, 1, 0, 0, 0, 22, 468, 1, 0, 0, 0, 24, 475, 1, 0, 0, 0, 26, 485, 1, 0, 0, 0, 28, 492, 1, 0, 0, 0, 30, 499, 1, 0, 0, 0, 32, 506, 1, 0, 0, 0, 34, 514, 1, 0, 0, 0, 36, 526, 1, 0, 0, 0, 38, 535, 1, 0, 0, 0, 40, 541, 1, 0, 0, 0, 42, 548, 1, 0, 0, 0, 44, 555, 1, 0, 0, 0, 46, 563, 1, 0, 0, 0, 48, 571, 1, 0, 0, 0, 50, 586, 1, 0, 0, 0, 52, 598, 1, 0, 0, 0, 54, 609, 1, 0, 0, 0, 56, 617, 1, 0, 0, 0, 58, 625, 1, 0, 0, 0, 60, 633, 1, 0, 0, 0, 62, 642, 1, 0, 0, 0, 64, 653, 1, 0, 0, 0, 66, 659, 1, 0, 0, 0, 68, 676, 1, 0, 0, 0, 70, 692, 1, 0, 0, 0, 72, 698, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 704, 1, 0, 0, 0, 78, 706, 1, 0, 0, 0, 80, 709, 1, 0, 0, 0, 82, 711, 1, 0, 0, 0, 84, 720, 1, 0, 0, 0, 86, 722, 1, 0, 0, 0, 88, 727, 1, 0, 0, 0, 90, 729, 1, 0, 0, 0, 92, 734, 1, 0, 0, 0, 94, 765, 1, 0, 0, 0, 96, 768, 1, 0, 0, 0, 98, 814, 1, 0, 0, 0, 100, 816, 1, 0, 0, 0, 102, 819, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 829, 1, 0, 0, 0, 110, 832, 1, 0, 0, 0, 112, 834, 1, 0, 0, 0, 114, 836, 1, 0, 0, 0, 116, 841, 1, 0, 0, 0, 118, 843, 1, 0, 0, 0, 120, 849, 1, 0, 0, 0, 122, 855, 1, 0, 0, 0, 124, 858, 1, 0, 0, 0, 126, 861, 1, 0, 0, 0, 128, 866, 1, 0, 0, 0, 130, 871, 1, 0, 0, 0, 132, 873, 1, 0, 0, 0, 134, 877, 1, 0, 0, 0, 136, 882, 1, 0, 0, 0, 138, 888, 1, 0, 0, 0, 140, 891, 1, 0, 0, 0, 142, 893, 1, 0, 0, 0, 144, 899, 1, 0, 0, 0, 146, 901, 1, 0, 0, 0, 148, 906, 1, 0, 0, 0, 150, 909, 1, 0, 0, 0, 152, 912, 1, 0, 0, 0, 154, 915, 1, 0, 0, 0, 156, 917, 1, 0, 0, 0, 158, 920, 1, 0, 0, 0, 160, 922, 1, 0, 0, 0, 162, 925, 1, 0, 0, 0, 164, 927, 1, 0, 0, 0, 166, 929, 1, 0, 0, 0, 168, 931, 1, 0, 0, 0, 170, 933, 1, 0, 0, 0, 172, 935, 1, 0, 0, 0, 174, 956, 1, 0, 0, 0, 176, 958, 1, 0, 0, 0, 178, 963, 1, 0, 0, 0, 180, 984, 1, 0, 0, 0, 182, 986, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1000, 1, 0, 0, 0, 190, 1004, 1, 0, 0, 0, 192, 1008, 1, 0, 0, 0, 194, 1013, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1022, 1, 0, 0, 0, 200, 1026, 1, 0, 0, 0, 202, 1030, 1, 0, 0, 0, 204, 1035, 1, 0, 0, 0, 206, 1039, 1, 0, 0, 0, 208, 1043, 1, 0, 0, 0, 210, 1047, 1, 0, 0, 0, 212, 1051, 1, 0, 0, 0, 214, 1055, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1074, 1, 0, 0, 0, 222, 1078, 1, 0, 0, 0, 224, 1082, 1, 0, 0, 0, 226, 1086, 1, 0, 0, 0, 228, 1090, 1, 0, 0, 0, 230, 1094, 1, 0, 0, 0, 232, 1099, 1, 0, 0, 0, 234, 1103, 1, 0, 0, 0, 236, 1107, 1, 0, 0, 0, 238, 1112, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1142, 1, 0, 0, 0, 244, 1146, 1, 0, 0, 0, 246, 1150, 1, 0, 0, 0, 248, 1154, 1, 0, 0, 0, 250, 1158, 1, 0, 0, 0, 252, 1162, 1, 0, 0, 0, 254, 1167, 1, 0, 0, 0, 256, 1171, 1, 0, 0, 0, 258, 1175, 1, 0, 0, 0, 260, 1179, 1, 0, 0, 0, 262, 1184, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1196, 1, 0, 0, 0, 270, 1200, 1, 0, 0, 0, 272, 1204, 1, 0, 0, 0, 274, 1208, 1, 0, 0, 0, 276, 1213, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 
1250, 1, 0, 0, 0, 290, 1254, 1, 0, 0, 0, 292, 1258, 1, 0, 0, 0, 294, 1262, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1276, 1, 0, 0, 0, 302, 1280, 1, 0, 0, 0, 304, 1284, 1, 0, 0, 0, 306, 1288, 1, 0, 0, 0, 308, 1292, 1, 0, 0, 0, 310, 1297, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1306, 1, 0, 0, 0, 316, 1310, 1, 0, 0, 0, 318, 1314, 1, 0, 0, 0, 320, 1319, 1, 0, 0, 0, 322, 1323, 1, 0, 0, 0, 324, 1328, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1337, 1, 0, 0, 0, 330, 1341, 1, 0, 0, 0, 332, 1345, 1, 0, 0, 0, 334, 1349, 1, 0, 0, 0, 336, 1353, 1, 0, 0, 0, 338, 1358, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1367, 1, 0, 0, 0, 344, 1371, 1, 0, 0, 0, 346, 1375, 1, 0, 0, 0, 348, 1380, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1393, 1, 0, 0, 0, 354, 1397, 1, 0, 0, 0, 356, 1401, 1, 0, 0, 0, 358, 1405, 1, 0, 0, 0, 360, 1410, 1, 0, 0, 0, 362, 1414, 1, 0, 0, 0, 364, 1418, 1, 0, 0, 0, 366, 1422, 1, 0, 0, 0, 368, 1427, 1, 0, 0, 0, 370, 1431, 1, 0, 0, 0, 372, 1435, 1, 0, 0, 0, 374, 1439, 1, 0, 0, 0, 376, 1443, 1, 0, 0, 0, 378, 1447, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1461, 1, 0, 0, 0, 386, 1465, 1, 0, 0, 0, 388, 1469, 1, 0, 0, 0, 390, 1473, 1, 0, 0, 0, 392, 1477, 1, 0, 0, 0, 394, 1482, 1, 0, 0, 0, 396, 1486, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1505, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 1530, 1, 0, 0, 0, 416, 1536, 1, 0, 0, 0, 418, 1542, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1550, 1, 0, 0, 0, 424, 1554, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1566, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1574, 1, 0, 0, 0, 434, 1578, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1590, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 443, 7, 0, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 2, 0, 0, 445, 446, 7, 2, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 4, 0, 0, 448, 449, 7, 5, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 0, 0, 0, 451, 17, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 1, 1, 0, 458, 19, 1, 0, 0, 0, 459, 460, 7, 3, 0, 0, 460, 461, 7, 9, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 4, 0, 0, 464, 465, 7, 10, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 2, 2, 0, 467, 21, 1, 0, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 11, 0, 0, 470, 471, 7, 12, 0, 0, 471, 472, 7, 13, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 3, 0, 0, 474, 23, 1, 0, 0, 0, 475, 476, 7, 3, 0, 0, 476, 477, 7, 14, 0, 0, 477, 478, 7, 8, 0, 0, 478, 479, 7, 13, 0, 0, 479, 480, 7, 12, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 4, 3, 0, 484, 25, 1, 0, 0, 0, 485, 486, 7, 15, 0, 0, 486, 487, 7, 6, 0, 0, 487, 488, 7, 7, 0, 0, 488, 489, 7, 16, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 5, 4, 0, 491, 27, 1, 0, 0, 0, 492, 493, 7, 17, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 7, 0, 0, 495, 496, 7, 18, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 6, 0, 0, 498, 29, 1, 0, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 3, 0, 0, 502, 503, 7, 8, 0, 0, 503, 504, 1, 0, 0, 0, 504, 505, 6, 7, 1, 0, 505, 31, 1, 0, 0, 0, 506, 507, 7, 13, 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 16, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 5, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 8, 0, 0, 513, 33, 1, 0, 0, 0, 514, 515, 7, 16, 0, 0, 515, 516, 7, 11, 0, 0, 516, 517, 5, 95, 0, 0, 517, 518, 7, 3, 0, 0, 518, 519, 7, 14, 0, 0, 519, 520, 7, 8, 0, 0, 520, 521, 7, 12, 0, 0, 521, 522, 7, 9, 0, 0, 522, 523, 7, 0, 0, 
0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 9, 5, 0, 525, 35, 1, 0, 0, 0, 526, 527, 7, 6, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 9, 0, 0, 529, 530, 7, 12, 0, 0, 530, 531, 7, 16, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 10, 6, 0, 534, 37, 1, 0, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 7, 0, 0, 537, 538, 7, 19, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 6, 11, 0, 0, 540, 39, 1, 0, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, 7, 10, 0, 0, 543, 544, 7, 7, 0, 0, 544, 545, 7, 19, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 12, 7, 0, 547, 41, 1, 0, 0, 0, 548, 549, 7, 2, 0, 0, 549, 550, 7, 7, 0, 0, 550, 551, 7, 6, 0, 0, 551, 552, 7, 5, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 13, 0, 0, 554, 43, 1, 0, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 7, 5, 0, 0, 557, 558, 7, 12, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 14, 0, 0, 562, 45, 1, 0, 0, 0, 563, 564, 7, 19, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 15, 0, 0, 570, 47, 1, 0, 0, 0, 571, 572, 4, 16, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 9, 0, 0, 574, 575, 7, 13, 0, 0, 575, 576, 7, 1, 0, 0, 576, 577, 7, 9, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 7, 2, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 12, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 16, 0, 0, 585, 49, 1, 0, 0, 0, 586, 587, 4, 17, 1, 0, 587, 588, 7, 13, 0, 0, 588, 589, 7, 7, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 18, 0, 0, 591, 592, 7, 20, 0, 0, 592, 593, 7, 8, 0, 0, 593, 594, 5, 95, 0, 0, 594, 595, 5, 128020, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 17, 8, 0, 597, 51, 1, 0, 0, 0, 598, 599, 4, 18, 2, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 3, 0, 0, 601, 602, 7, 5, 0, 0, 602, 603, 7, 6, 0, 0, 603, 604, 7, 1, 0, 0, 604, 605, 7, 4, 0, 0, 605, 606, 7, 2, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 18, 9, 0, 608, 53, 1, 0, 0, 0, 609, 610, 4, 19, 3, 0, 610, 611, 7, 21, 0, 0, 611, 612, 7, 7, 0, 0, 612, 613, 7, 1, 0, 0, 613, 614, 7, 9, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 19, 10, 0, 616, 55, 1, 0, 0, 0, 617, 618, 4, 20, 4, 0, 618, 619, 7, 15, 0, 0, 619, 620, 7, 20, 0, 0, 620, 621, 7, 13, 0, 0, 621, 622, 7, 13, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 20, 10, 0, 624, 57, 1, 0, 0, 0, 625, 626, 4, 21, 5, 0, 626, 627, 7, 13, 0, 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 5, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 21, 10, 0, 632, 59, 1, 0, 0, 0, 633, 634, 4, 22, 6, 0, 634, 635, 7, 6, 0, 0, 635, 636, 7, 1, 0, 0, 636, 637, 7, 17, 0, 0, 637, 638, 7, 10, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 22, 10, 0, 641, 61, 1, 0, 0, 0, 642, 643, 4, 23, 7, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 23, 10, 0, 651, 63, 1, 0, 0, 0, 652, 654, 8, 22, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 24, 0, 0, 658, 65, 1, 0, 0, 0, 659, 660, 5, 47, 0, 0, 660, 661, 5, 47, 0, 0, 661, 665, 1, 0, 0, 0, 662, 664, 8, 23, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 670, 5, 13, 0, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 5, 10, 0, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 25, 11, 0, 675, 67, 1, 0, 0, 0, 676, 677, 5, 47, 0, 0, 677, 678, 5, 42, 0, 0, 
678, 683, 1, 0, 0, 0, 679, 682, 3, 68, 26, 0, 680, 682, 9, 0, 0, 0, 681, 679, 1, 0, 0, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 684, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 687, 5, 42, 0, 0, 687, 688, 5, 47, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 26, 11, 0, 690, 69, 1, 0, 0, 0, 691, 693, 7, 24, 0, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 697, 6, 27, 11, 0, 697, 71, 1, 0, 0, 0, 698, 699, 5, 124, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 28, 12, 0, 701, 73, 1, 0, 0, 0, 702, 703, 7, 25, 0, 0, 703, 75, 1, 0, 0, 0, 704, 705, 7, 26, 0, 0, 705, 77, 1, 0, 0, 0, 706, 707, 5, 92, 0, 0, 707, 708, 7, 27, 0, 0, 708, 79, 1, 0, 0, 0, 709, 710, 8, 28, 0, 0, 710, 81, 1, 0, 0, 0, 711, 713, 7, 3, 0, 0, 712, 714, 7, 29, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 3, 74, 29, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 83, 1, 0, 0, 0, 720, 721, 5, 64, 0, 0, 721, 85, 1, 0, 0, 0, 722, 723, 5, 96, 0, 0, 723, 87, 1, 0, 0, 0, 724, 728, 8, 30, 0, 0, 725, 726, 5, 96, 0, 0, 726, 728, 5, 96, 0, 0, 727, 724, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 89, 1, 0, 0, 0, 729, 730, 5, 95, 0, 0, 730, 91, 1, 0, 0, 0, 731, 735, 3, 76, 30, 0, 732, 735, 3, 74, 29, 0, 733, 735, 3, 90, 37, 0, 734, 731, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 733, 1, 0, 0, 0, 735, 93, 1, 0, 0, 0, 736, 741, 5, 34, 0, 0, 737, 740, 3, 78, 31, 0, 738, 740, 3, 80, 32, 0, 739, 737, 1, 0, 0, 0, 739, 738, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 744, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 744, 766, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 748, 5, 34, 0, 0, 748, 752, 1, 0, 0, 0, 749, 751, 8, 23, 0, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 753, 755, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 5, 34, 0, 0, 758, 760, 1, 0, 0, 0, 759, 761, 5, 34, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 764, 5, 34, 0, 0, 763, 762, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 736, 1, 0, 0, 0, 765, 745, 1, 0, 0, 0, 766, 95, 1, 0, 0, 0, 767, 769, 3, 74, 29, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 97, 1, 0, 0, 0, 772, 774, 3, 74, 29, 0, 773, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 781, 3, 116, 50, 0, 778, 780, 3, 74, 29, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 815, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 786, 3, 116, 50, 0, 785, 787, 3, 74, 29, 0, 786, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 815, 1, 0, 0, 0, 790, 792, 3, 74, 29, 0, 791, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 802, 1, 0, 0, 0, 795, 799, 3, 116, 50, 0, 796, 798, 3, 74, 29, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 3, 82, 33, 0, 805, 815, 1, 0, 0, 0, 806, 808, 3, 116, 50, 0, 807, 809, 3, 74, 29, 0, 808, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 3, 82, 33, 0, 813, 815, 1, 0, 0, 0, 814, 773, 1, 0, 
0, 0, 814, 784, 1, 0, 0, 0, 814, 791, 1, 0, 0, 0, 814, 806, 1, 0, 0, 0, 815, 99, 1, 0, 0, 0, 816, 817, 7, 31, 0, 0, 817, 818, 7, 32, 0, 0, 818, 101, 1, 0, 0, 0, 819, 820, 7, 12, 0, 0, 820, 821, 7, 9, 0, 0, 821, 822, 7, 0, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 4, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 5, 61, 0, 0, 828, 107, 1, 0, 0, 0, 829, 830, 5, 58, 0, 0, 830, 831, 5, 58, 0, 0, 831, 109, 1, 0, 0, 0, 832, 833, 5, 58, 0, 0, 833, 111, 1, 0, 0, 0, 834, 835, 5, 44, 0, 0, 835, 113, 1, 0, 0, 0, 836, 837, 7, 0, 0, 0, 837, 838, 7, 3, 0, 0, 838, 839, 7, 2, 0, 0, 839, 840, 7, 4, 0, 0, 840, 115, 1, 0, 0, 0, 841, 842, 5, 46, 0, 0, 842, 117, 1, 0, 0, 0, 843, 844, 7, 15, 0, 0, 844, 845, 7, 12, 0, 0, 845, 846, 7, 13, 0, 0, 846, 847, 7, 2, 0, 0, 847, 848, 7, 3, 0, 0, 848, 119, 1, 0, 0, 0, 849, 850, 7, 15, 0, 0, 850, 851, 7, 1, 0, 0, 851, 852, 7, 6, 0, 0, 852, 853, 7, 2, 0, 0, 853, 854, 7, 5, 0, 0, 854, 121, 1, 0, 0, 0, 855, 856, 7, 1, 0, 0, 856, 857, 7, 9, 0, 0, 857, 123, 1, 0, 0, 0, 858, 859, 7, 1, 0, 0, 859, 860, 7, 2, 0, 0, 860, 125, 1, 0, 0, 0, 861, 862, 7, 13, 0, 0, 862, 863, 7, 12, 0, 0, 863, 864, 7, 2, 0, 0, 864, 865, 7, 5, 0, 0, 865, 127, 1, 0, 0, 0, 866, 867, 7, 13, 0, 0, 867, 868, 7, 1, 0, 0, 868, 869, 7, 18, 0, 0, 869, 870, 7, 3, 0, 0, 870, 129, 1, 0, 0, 0, 871, 872, 5, 40, 0, 0, 872, 131, 1, 0, 0, 0, 873, 874, 7, 9, 0, 0, 874, 875, 7, 7, 0, 0, 875, 876, 7, 5, 0, 0, 876, 133, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 20, 0, 0, 879, 880, 7, 13, 0, 0, 880, 881, 7, 13, 0, 0, 881, 135, 1, 0, 0, 0, 882, 883, 7, 9, 0, 0, 883, 884, 7, 20, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 2, 0, 0, 887, 137, 1, 0, 0, 0, 888, 889, 7, 7, 0, 0, 889, 890, 7, 6, 0, 0, 890, 139, 1, 0, 0, 0, 891, 892, 5, 63, 0, 0, 892, 141, 1, 0, 0, 0, 893, 894, 7, 6, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 1, 0, 0, 896, 897, 7, 18, 0, 0, 897, 898, 7, 3, 0, 0, 898, 143, 1, 0, 0, 0, 899, 900, 5, 41, 0, 0, 900, 145, 1, 0, 0, 0, 901, 902, 7, 5, 0, 0, 902, 903, 7, 6, 0, 0, 903, 904, 7, 20, 0, 0, 904, 905, 7, 3, 0, 0, 905, 147, 1, 0, 0, 0, 906, 907, 5, 61, 0, 0, 907, 908, 5, 61, 0, 0, 908, 149, 1, 0, 0, 0, 909, 910, 5, 61, 0, 0, 910, 911, 5, 126, 0, 0, 911, 151, 1, 0, 0, 0, 912, 913, 5, 33, 0, 0, 913, 914, 5, 61, 0, 0, 914, 153, 1, 0, 0, 0, 915, 916, 5, 60, 0, 0, 916, 155, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 919, 5, 61, 0, 0, 919, 157, 1, 0, 0, 0, 920, 921, 5, 62, 0, 0, 921, 159, 1, 0, 0, 0, 922, 923, 5, 62, 0, 0, 923, 924, 5, 61, 0, 0, 924, 161, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 163, 1, 0, 0, 0, 927, 928, 5, 45, 0, 0, 928, 165, 1, 0, 0, 0, 929, 930, 5, 42, 0, 0, 930, 167, 1, 0, 0, 0, 931, 932, 5, 47, 0, 0, 932, 169, 1, 0, 0, 0, 933, 934, 5, 37, 0, 0, 934, 171, 1, 0, 0, 0, 935, 936, 3, 46, 15, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 78, 13, 0, 938, 173, 1, 0, 0, 0, 939, 942, 3, 140, 62, 0, 940, 943, 3, 76, 30, 0, 941, 943, 3, 90, 37, 0, 942, 940, 1, 0, 0, 0, 942, 941, 1, 0, 0, 0, 943, 947, 1, 0, 0, 0, 944, 946, 3, 92, 38, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 957, 1, 0, 0, 0, 949, 947, 1, 0, 0, 0, 950, 952, 3, 140, 62, 0, 951, 953, 3, 74, 29, 0, 952, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 957, 1, 0, 0, 0, 956, 939, 1, 0, 0, 0, 956, 950, 1, 0, 0, 0, 957, 175, 1, 0, 0, 0, 958, 959, 5, 91, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 80, 0, 0, 961, 962, 6, 80, 0, 0, 962, 177, 1, 0, 0, 0, 963, 964, 5, 93, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 81, 12, 0, 
966, 967, 6, 81, 12, 0, 967, 179, 1, 0, 0, 0, 968, 972, 3, 76, 30, 0, 969, 971, 3, 92, 38, 0, 970, 969, 1, 0, 0, 0, 971, 974, 1, 0, 0, 0, 972, 970, 1, 0, 0, 0, 972, 973, 1, 0, 0, 0, 973, 985, 1, 0, 0, 0, 974, 972, 1, 0, 0, 0, 975, 978, 3, 90, 37, 0, 976, 978, 3, 84, 34, 0, 977, 975, 1, 0, 0, 0, 977, 976, 1, 0, 0, 0, 978, 980, 1, 0, 0, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 968, 1, 0, 0, 0, 984, 977, 1, 0, 0, 0, 985, 181, 1, 0, 0, 0, 986, 988, 3, 86, 35, 0, 987, 989, 3, 88, 36, 0, 988, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 3, 86, 35, 0, 993, 183, 1, 0, 0, 0, 994, 995, 3, 182, 83, 0, 995, 185, 1, 0, 0, 0, 996, 997, 3, 66, 25, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 85, 11, 0, 999, 187, 1, 0, 0, 0, 1000, 1001, 3, 68, 26, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 86, 11, 0, 1003, 189, 1, 0, 0, 0, 1004, 1005, 3, 70, 27, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 87, 11, 0, 1007, 191, 1, 0, 0, 0, 1008, 1009, 3, 176, 80, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 88, 14, 0, 1011, 1012, 6, 88, 15, 0, 1012, 193, 1, 0, 0, 0, 1013, 1014, 3, 72, 28, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 89, 16, 0, 1016, 1017, 6, 89, 12, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 11, 0, 1021, 197, 1, 0, 0, 0, 1022, 1023, 3, 66, 25, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 91, 11, 0, 1025, 199, 1, 0, 0, 0, 1026, 1027, 3, 68, 26, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 92, 11, 0, 1029, 201, 1, 0, 0, 0, 1030, 1031, 3, 72, 28, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 93, 16, 0, 1033, 1034, 6, 93, 12, 0, 1034, 203, 1, 0, 0, 0, 1035, 1036, 3, 176, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 94, 14, 0, 1038, 205, 1, 0, 0, 0, 1039, 1040, 3, 178, 81, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 95, 17, 0, 1042, 207, 1, 0, 0, 0, 1043, 1044, 3, 110, 47, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 96, 18, 0, 1046, 209, 1, 0, 0, 0, 1047, 1048, 3, 112, 48, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 97, 19, 0, 1050, 211, 1, 0, 0, 0, 1051, 1052, 3, 106, 45, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 98, 20, 0, 1054, 213, 1, 0, 0, 0, 1055, 1056, 7, 16, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 7, 5, 0, 0, 1058, 1059, 7, 12, 0, 0, 1059, 1060, 7, 0, 0, 0, 1060, 1061, 7, 12, 0, 0, 1061, 1062, 7, 5, 0, 0, 1062, 1063, 7, 12, 0, 0, 1063, 215, 1, 0, 0, 0, 1064, 1068, 8, 33, 0, 0, 1065, 1066, 5, 47, 0, 0, 1066, 1068, 8, 34, 0, 0, 1067, 1064, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 217, 1, 0, 0, 0, 1069, 1071, 3, 216, 100, 0, 1070, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1075, 3, 218, 101, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 102, 21, 0, 1077, 221, 1, 0, 0, 0, 1078, 1079, 3, 94, 39, 0, 1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 103, 22, 0, 1081, 223, 1, 0, 0, 0, 1082, 1083, 3, 66, 25, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 104, 11, 0, 1085, 225, 1, 0, 0, 0, 1086, 1087, 3, 68, 26, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 105, 11, 0, 1089, 227, 1, 0, 0, 0, 1090, 1091, 3, 70, 27, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 106, 11, 0, 1093, 229, 1, 0, 0, 0, 1094, 1095, 3, 72, 28, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 107, 16, 0, 1097, 1098, 6, 107, 12, 0, 1098, 231, 1, 0, 0, 0, 1099, 1100, 3, 116, 50, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 108, 23, 0, 1102, 233, 1, 0, 0, 0, 1103, 1104, 3, 112, 48, 0, 1104, 1105, 1, 0, 0, 0, 1105, 
1106, 6, 109, 19, 0, 1106, 235, 1, 0, 0, 0, 1107, 1108, 4, 110, 8, 0, 1108, 1109, 3, 140, 62, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 110, 24, 0, 1111, 237, 1, 0, 0, 0, 1112, 1113, 4, 111, 9, 0, 1113, 1114, 3, 174, 79, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 25, 0, 1116, 239, 1, 0, 0, 0, 1117, 1122, 3, 76, 30, 0, 1118, 1122, 3, 74, 29, 0, 1119, 1122, 3, 90, 37, 0, 1120, 1122, 3, 166, 75, 0, 1121, 1117, 1, 0, 0, 0, 1121, 1118, 1, 0, 0, 0, 1121, 1119, 1, 0, 0, 0, 1121, 1120, 1, 0, 0, 0, 1122, 241, 1, 0, 0, 0, 1123, 1126, 3, 76, 30, 0, 1124, 1126, 3, 166, 75, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1130, 1, 0, 0, 0, 1127, 1129, 3, 240, 112, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1132, 1, 0, 0, 0, 1130, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1143, 1, 0, 0, 0, 1132, 1130, 1, 0, 0, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 84, 34, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 240, 112, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1143, 1, 0, 0, 0, 1142, 1125, 1, 0, 0, 0, 1142, 1135, 1, 0, 0, 0, 1143, 243, 1, 0, 0, 0, 1144, 1147, 3, 242, 113, 0, 1145, 1147, 3, 182, 83, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 245, 1, 0, 0, 0, 1150, 1151, 3, 66, 25, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 115, 11, 0, 1153, 247, 1, 0, 0, 0, 1154, 1155, 3, 68, 26, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 116, 11, 0, 1157, 249, 1, 0, 0, 0, 1158, 1159, 3, 70, 27, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 117, 11, 0, 1161, 251, 1, 0, 0, 0, 1162, 1163, 3, 72, 28, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 118, 16, 0, 1165, 1166, 6, 118, 12, 0, 1166, 253, 1, 0, 0, 0, 1167, 1168, 3, 106, 45, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 119, 20, 0, 1170, 255, 1, 0, 0, 0, 1171, 1172, 3, 112, 48, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 120, 19, 0, 1174, 257, 1, 0, 0, 0, 1175, 1176, 3, 116, 50, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 121, 23, 0, 1178, 259, 1, 0, 0, 0, 1179, 1180, 4, 122, 10, 0, 1180, 1181, 3, 140, 62, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 122, 24, 0, 1183, 261, 1, 0, 0, 0, 1184, 1185, 4, 123, 11, 0, 1185, 1186, 3, 174, 79, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 25, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 1191, 7, 2, 0, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 3, 244, 114, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 125, 26, 0, 1195, 267, 1, 0, 0, 0, 1196, 1197, 3, 66, 25, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 126, 11, 0, 1199, 269, 1, 0, 0, 0, 1200, 1201, 3, 68, 26, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 127, 11, 0, 1203, 271, 1, 0, 0, 0, 1204, 1205, 3, 70, 27, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 128, 11, 0, 1207, 273, 1, 0, 0, 0, 1208, 1209, 3, 72, 28, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 129, 16, 0, 1211, 1212, 6, 129, 12, 0, 1212, 275, 1, 0, 0, 0, 1213, 1214, 3, 176, 80, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 130, 14, 0, 1216, 1217, 6, 130, 27, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 7, 7, 0, 0, 1219, 1220, 7, 9, 0, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 131, 28, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 7, 19, 0, 0, 1224, 1225, 7, 1, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 10, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 281, 1, 0, 0, 0, 1230, 1231, 8, 35, 0, 0, 1231, 283, 1, 0, 0, 0, 1232, 1234, 3, 282, 133, 0, 1233, 1232, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1233, 1, 0, 0, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 
1, 0, 0, 0, 1237, 1238, 3, 110, 47, 0, 1238, 1240, 1, 0, 0, 0, 1239, 1233, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1242, 1, 0, 0, 0, 1241, 1243, 3, 282, 133, 0, 1242, 1241, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 3, 284, 134, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 135, 29, 0, 1249, 287, 1, 0, 0, 0, 1250, 1251, 3, 66, 25, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 136, 11, 0, 1253, 289, 1, 0, 0, 0, 1254, 1255, 3, 68, 26, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 137, 11, 0, 1257, 291, 1, 0, 0, 0, 1258, 1259, 3, 70, 27, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 138, 11, 0, 1261, 293, 1, 0, 0, 0, 1262, 1263, 3, 72, 28, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 139, 16, 0, 1265, 1266, 6, 139, 12, 0, 1266, 1267, 6, 139, 12, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 106, 45, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 20, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 112, 48, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 19, 0, 1275, 299, 1, 0, 0, 0, 1276, 1277, 3, 116, 50, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 142, 23, 0, 1279, 301, 1, 0, 0, 0, 1280, 1281, 3, 280, 132, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 143, 30, 0, 1283, 303, 1, 0, 0, 0, 1284, 1285, 3, 244, 114, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 144, 26, 0, 1287, 305, 1, 0, 0, 0, 1288, 1289, 3, 184, 84, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 145, 31, 0, 1291, 307, 1, 0, 0, 0, 1292, 1293, 4, 146, 12, 0, 1293, 1294, 3, 140, 62, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 146, 24, 0, 1296, 309, 1, 0, 0, 0, 1297, 1298, 4, 147, 13, 0, 1298, 1299, 3, 174, 79, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 25, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 3, 66, 25, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 148, 11, 0, 1305, 313, 1, 0, 0, 0, 1306, 1307, 3, 68, 26, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 149, 11, 0, 1309, 315, 1, 0, 0, 0, 1310, 1311, 3, 70, 27, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 150, 11, 0, 1313, 317, 1, 0, 0, 0, 1314, 1315, 3, 72, 28, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 151, 16, 0, 1317, 1318, 6, 151, 12, 0, 1318, 319, 1, 0, 0, 0, 1319, 1320, 3, 116, 50, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 152, 23, 0, 1322, 321, 1, 0, 0, 0, 1323, 1324, 4, 153, 14, 0, 1324, 1325, 3, 140, 62, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 24, 0, 1327, 323, 1, 0, 0, 0, 1328, 1329, 4, 154, 15, 0, 1329, 1330, 3, 174, 79, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 25, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 3, 184, 84, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 155, 31, 0, 1336, 327, 1, 0, 0, 0, 1337, 1338, 3, 180, 82, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 156, 32, 0, 1340, 329, 1, 0, 0, 0, 1341, 1342, 3, 66, 25, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 157, 11, 0, 1344, 331, 1, 0, 0, 0, 1345, 1346, 3, 68, 26, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 158, 11, 0, 1348, 333, 1, 0, 0, 0, 1349, 1350, 3, 70, 27, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 159, 11, 0, 1352, 335, 1, 0, 0, 0, 1353, 1354, 3, 72, 28, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 160, 16, 0, 1356, 1357, 6, 160, 12, 0, 1357, 337, 1, 0, 0, 0, 1358, 1359, 7, 1, 0, 0, 1359, 1360, 7, 9, 0, 0, 1360, 1361, 7, 15, 0, 0, 1361, 1362, 7, 7, 0, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 11, 0, 1366, 341, 1, 0, 0, 0, 1367, 1368, 3, 68, 26, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 163, 11, 0, 1370, 343, 1, 0, 0, 0, 1371, 1372, 3, 70, 27, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 164, 11, 0, 1374, 345, 1, 0, 0, 0, 1375, 1376, 3, 178, 
81, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 165, 17, 0, 1378, 1379, 6, 165, 12, 0, 1379, 347, 1, 0, 0, 0, 1380, 1381, 3, 110, 47, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 166, 18, 0, 1383, 349, 1, 0, 0, 0, 1384, 1390, 3, 84, 34, 0, 1385, 1390, 3, 74, 29, 0, 1386, 1390, 3, 116, 50, 0, 1387, 1390, 3, 76, 30, 0, 1388, 1390, 3, 90, 37, 0, 1389, 1384, 1, 0, 0, 0, 1389, 1385, 1, 0, 0, 0, 1389, 1386, 1, 0, 0, 0, 1389, 1387, 1, 0, 0, 0, 1389, 1388, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1389, 1, 0, 0, 0, 1391, 1392, 1, 0, 0, 0, 1392, 351, 1, 0, 0, 0, 1393, 1394, 3, 66, 25, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 168, 11, 0, 1396, 353, 1, 0, 0, 0, 1397, 1398, 3, 68, 26, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 169, 11, 0, 1400, 355, 1, 0, 0, 0, 1401, 1402, 3, 70, 27, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 170, 11, 0, 1404, 357, 1, 0, 0, 0, 1405, 1406, 3, 72, 28, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 171, 16, 0, 1408, 1409, 6, 171, 12, 0, 1409, 359, 1, 0, 0, 0, 1410, 1411, 3, 110, 47, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 172, 18, 0, 1413, 361, 1, 0, 0, 0, 1414, 1415, 3, 112, 48, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 173, 19, 0, 1417, 363, 1, 0, 0, 0, 1418, 1419, 3, 116, 50, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 174, 23, 0, 1421, 365, 1, 0, 0, 0, 1422, 1423, 3, 278, 131, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 175, 33, 0, 1425, 1426, 6, 175, 34, 0, 1426, 367, 1, 0, 0, 0, 1427, 1428, 3, 218, 101, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 176, 21, 0, 1430, 369, 1, 0, 0, 0, 1431, 1432, 3, 94, 39, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 177, 22, 0, 1434, 371, 1, 0, 0, 0, 1435, 1436, 3, 66, 25, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 178, 11, 0, 1438, 373, 1, 0, 0, 0, 1439, 1440, 3, 68, 26, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 179, 11, 0, 1442, 375, 1, 0, 0, 0, 1443, 1444, 3, 70, 27, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 180, 11, 0, 1446, 377, 1, 0, 0, 0, 1447, 1448, 3, 72, 28, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 181, 16, 0, 1450, 1451, 6, 181, 12, 0, 1451, 1452, 6, 181, 12, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 112, 48, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 19, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 116, 50, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 23, 0, 1460, 383, 1, 0, 0, 0, 1461, 1462, 3, 244, 114, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 184, 26, 0, 1464, 385, 1, 0, 0, 0, 1465, 1466, 3, 66, 25, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 185, 11, 0, 1468, 387, 1, 0, 0, 0, 1469, 1470, 3, 68, 26, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 186, 11, 0, 1472, 389, 1, 0, 0, 0, 1473, 1474, 3, 70, 27, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 187, 11, 0, 1476, 391, 1, 0, 0, 0, 1477, 1478, 3, 72, 28, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 188, 16, 0, 1480, 1481, 6, 188, 12, 0, 1481, 393, 1, 0, 0, 0, 1482, 1483, 3, 54, 19, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 189, 35, 0, 1485, 395, 1, 0, 0, 0, 1486, 1487, 3, 264, 124, 0, 1487, 1488, 1, 0, 0, 0, 1488, 1489, 6, 190, 36, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 278, 131, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 33, 0, 1493, 1494, 6, 191, 12, 0, 1494, 1495, 6, 191, 0, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 7, 20, 0, 0, 1497, 1498, 7, 2, 0, 0, 1498, 1499, 7, 1, 0, 0, 1499, 1500, 7, 9, 0, 0, 1500, 1501, 7, 17, 0, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 12, 0, 1503, 1504, 6, 192, 0, 0, 1504, 401, 1, 0, 0, 0, 1505, 1506, 3, 180, 82, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 193, 32, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 184, 84, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 
31, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 66, 25, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 11, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 68, 26, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 11, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 70, 27, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 11, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 72, 28, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 16, 0, 1528, 1529, 6, 198, 12, 0, 1529, 413, 1, 0, 0, 0, 1530, 1531, 3, 218, 101, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 199, 21, 0, 1533, 1534, 6, 199, 12, 0, 1534, 1535, 6, 199, 37, 0, 1535, 415, 1, 0, 0, 0, 1536, 1537, 3, 94, 39, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 200, 22, 0, 1539, 1540, 6, 200, 12, 0, 1540, 1541, 6, 200, 37, 0, 1541, 417, 1, 0, 0, 0, 1542, 1543, 3, 66, 25, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 201, 11, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 68, 26, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 11, 0, 1549, 421, 1, 0, 0, 0, 1550, 1551, 3, 70, 27, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 203, 11, 0, 1553, 423, 1, 0, 0, 0, 1554, 1555, 3, 110, 47, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 204, 18, 0, 1557, 1558, 6, 204, 12, 0, 1558, 1559, 6, 204, 9, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 112, 48, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 19, 0, 1563, 1564, 6, 205, 12, 0, 1564, 1565, 6, 205, 9, 0, 1565, 427, 1, 0, 0, 0, 1566, 1567, 3, 66, 25, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 206, 11, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 68, 26, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 11, 0, 1573, 431, 1, 0, 0, 0, 1574, 1575, 3, 70, 27, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 208, 11, 0, 1577, 433, 1, 0, 0, 0, 1578, 1579, 3, 184, 84, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 209, 12, 0, 1581, 1582, 6, 209, 0, 0, 1582, 1583, 6, 209, 31, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 180, 82, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 12, 0, 1587, 1588, 6, 210, 0, 0, 1588, 1589, 6, 210, 32, 0, 1589, 437, 1, 0, 0, 0, 1590, 1591, 3, 100, 42, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 211, 12, 0, 1593, 1594, 6, 211, 0, 0, 1594, 1595, 6, 211, 38, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 72, 28, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 16, 0, 1599, 1600, 6, 212, 12, 0, 1600, 441, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 655, 665, 669, 672, 681, 683, 694, 713, 718, 727, 734, 739, 741, 752, 760, 763, 765, 770, 775, 781, 788, 793, 799, 802, 810, 814, 942, 947, 954, 956, 972, 977, 982, 984, 990, 1067, 1072, 1121, 1125, 1130, 1135, 1140, 1142, 1146, 1148, 1235, 1239, 1244, 1389, 1391, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 7, 71, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 81, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 915264f21910f..f04582e820e28 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -29,9 +29,9 @@ public class EsqlBaseLexer extends LexerConfig { LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, 
WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29, - PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34, - AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, @@ -73,22 +73,22 @@ private static String[] makeRuleNames() { "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", "UNKNOWN_CMD", - "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", "DIGIT", - "LETTER", "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", - "BACKQUOTE", "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", + "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "DIGIT", "LETTER", + "ESCAPE_SEQUENCE", "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", + "BACKQUOTE_BLOCK", "UNDERSCORE", "UNQUOTED_ID_BODY", "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", - "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", - "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", - "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "EXPRESSION_COLON", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", - "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", - "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", - "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", - "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", - "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", - "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", - "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", + "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", + "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", + "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", + 
"PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", @@ -125,8 +125,8 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, null, null, null, - null, null, null, null, null, "':'", "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, @@ -144,28 +144,28 @@ private static String[] makeSymbolicNames() { "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", - "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", - "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", - "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", - "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", - "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", - "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", - "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", 
"OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -246,23 +246,21 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); case 23: return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 78: - return EXPRESSION_COLON_sempred((RuleContext)_localctx, predIndex); - case 111: + case 110: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 112: + case 111: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 123: + case 122: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 124: + case 123: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 147: + case 146: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 148: + case 147: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 154: + case 153: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 155: + case 154: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -323,72 +321,65 @@ private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean EXPRESSION_COLON_sempred(RuleContext _localctx, int predIndex) { - switch (predIndex) { - case 8: - return this.isDevVersion(); - } - return true; - } private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 9: + case 8: return this.isDevVersion(); } return true; } private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return this.isDevVersion(); } return true; } private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 11: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - 
case 13: + case 12: return this.isDevVersion(); } return true; } private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 14: + case 13: return this.isDevVersion(); } return true; } private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 15: + case 14: return this.isDevVersion(); } return true; } private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { - case 16: + case 15: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0000\u0080\u0648\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0080\u0641\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -450,991 +441,987 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - "\u00d4\u0002\u00d5\u0007\u00d5\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u00d4\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ + 
"\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ + "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0018\u0004\u0018\u0290\b\u0018\u000b\u0018\f\u0018\u0291"+ - "\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ - "\u0005\u0019\u029a\b\u0019\n\u0019\f\u0019\u029d\t\u0019\u0001\u0019\u0003"+ - "\u0019\u02a0\b\u0019\u0001\u0019\u0003\u0019\u02a3\b\u0019\u0001\u0019"+ - "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0005\u001a\u02ac\b\u001a\n\u001a\f\u001a\u02af\t\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b7"+ - "\b\u001b\u000b\u001b\f\u001b\u02b8\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001e"+ - "\u0001\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001 \u0001!\u0001"+ - "!\u0001\"\u0001\"\u0003\"\u02ce\b\"\u0001\"\u0004\"\u02d1\b\"\u000b\""+ - "\f\"\u02d2\u0001#\u0001#\u0001$\u0001$\u0001%\u0001%\u0001%\u0003%\u02dc"+ - "\b%\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0003\'\u02e3\b\'\u0001(\u0001"+ - "(\u0001(\u0005(\u02e8\b(\n(\f(\u02eb\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ - "(\u0001(\u0005(\u02f3\b(\n(\f(\u02f6\t(\u0001(\u0001(\u0001(\u0001(\u0001"+ - "(\u0003(\u02fd\b(\u0001(\u0003(\u0300\b(\u0003(\u0302\b(\u0001)\u0004"+ - 
")\u0305\b)\u000b)\f)\u0306\u0001*\u0004*\u030a\b*\u000b*\f*\u030b\u0001"+ - "*\u0001*\u0005*\u0310\b*\n*\f*\u0313\t*\u0001*\u0001*\u0004*\u0317\b*"+ - "\u000b*\f*\u0318\u0001*\u0004*\u031c\b*\u000b*\f*\u031d\u0001*\u0001*"+ - "\u0005*\u0322\b*\n*\f*\u0325\t*\u0003*\u0327\b*\u0001*\u0001*\u0001*\u0001"+ - "*\u0004*\u032d\b*\u000b*\f*\u032e\u0001*\u0001*\u0003*\u0333\b*\u0001"+ - "+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001"+ - "-\u0001.\u0001.\u0001/\u0001/\u0001/\u00010\u00010\u00011\u00011\u0001"+ - "1\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u00013\u0001"+ - "3\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u00015\u0001"+ - "6\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u00018\u0001"+ - "8\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001;\u0001"+ - ";\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001?\u0001"+ - "?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001B\u0001"+ - "B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001F\u0001"+ - "F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ - "J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001N\u0001"+ - "N\u0001N\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0003P\u03b6"+ - "\bP\u0001P\u0005P\u03b9\bP\nP\fP\u03bc\tP\u0001P\u0001P\u0004P\u03c0\b"+ - "P\u000bP\fP\u03c1\u0003P\u03c4\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ - "R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0005S\u03d2\bS\nS\fS\u03d5"+ - "\tS\u0001S\u0001S\u0003S\u03d9\bS\u0001S\u0004S\u03dc\bS\u000bS\fS\u03dd"+ - "\u0003S\u03e0\bS\u0001T\u0001T\u0004T\u03e4\bT\u000bT\fT\u03e5\u0001T"+ - "\u0001T\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001"+ - "W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ - "\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ - "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001"+ - "`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001"+ - "c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001d\u0001"+ - "d\u0001d\u0001e\u0001e\u0001e\u0003e\u0433\be\u0001f\u0004f\u0436\bf\u000b"+ - "f\ff\u0437\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ + "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ + "\u0018\u028e\b\u0018\u000b\u0018\f\u0018\u028f\u0001\u0018\u0001\u0018"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0298\b\u0019"+ + "\n\u0019\f\u0019\u029b\t\u0019\u0001\u0019\u0003\u0019\u029e\b\u0019\u0001"+ + "\u0019\u0003\u0019\u02a1\b\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02aa\b\u001a\n"+ + "\u001a\f\u001a\u02ad\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b5\b\u001b\u000b\u001b\f"+ + "\u001b\u02b6\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ + "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02ca\b!\u0001"+ + "!\u0004!\u02cd\b!\u000b!\f!\u02ce\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "$\u0001$\u0001$\u0003$\u02d8\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ + 
"&\u02df\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02e4\b\'\n\'\f\'\u02e7\t\'"+ + "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u02ef\b\'\n\'"+ + "\f\'\u02f2\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u02f9\b"+ + "\'\u0001\'\u0003\'\u02fc\b\'\u0003\'\u02fe\b\'\u0001(\u0004(\u0301\b("+ + "\u000b(\f(\u0302\u0001)\u0004)\u0306\b)\u000b)\f)\u0307\u0001)\u0001)"+ + "\u0005)\u030c\b)\n)\f)\u030f\t)\u0001)\u0001)\u0004)\u0313\b)\u000b)\f"+ + ")\u0314\u0001)\u0004)\u0318\b)\u000b)\f)\u0319\u0001)\u0001)\u0005)\u031e"+ + "\b)\n)\f)\u0321\t)\u0003)\u0323\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ + "\u0329\b)\u000b)\f)\u032a\u0001)\u0001)\u0003)\u032f\b)\u0001*\u0001*"+ + "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ + "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ + "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ + "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ + "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ + "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ + ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ + "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ + "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ + "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ + "N\u0001N\u0001O\u0001O\u0001O\u0003O\u03af\bO\u0001O\u0005O\u03b2\bO\n"+ + "O\fO\u03b5\tO\u0001O\u0001O\u0004O\u03b9\bO\u000bO\fO\u03ba\u0003O\u03bd"+ + "\bO\u0001P\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ + "Q\u0001R\u0001R\u0005R\u03cb\bR\nR\fR\u03ce\tR\u0001R\u0001R\u0003R\u03d2"+ + "\bR\u0001R\u0004R\u03d5\bR\u000bR\fR\u03d6\u0003R\u03d9\bR\u0001S\u0001"+ + "S\u0004S\u03dd\bS\u000bS\fS\u03de\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ + "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ + "\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ + "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ + "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ + "c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0003"+ + "d\u042c\bd\u0001e\u0004e\u042f\be\u000be\fe\u0430\u0001f\u0001f\u0001"+ + "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ - "m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001o\u0001"+ - "p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0003q\u0469"+ - "\bq\u0001r\u0001r\u0003r\u046d\br\u0001r\u0005r\u0470\br\nr\fr\u0473\t"+ - "r\u0001r\u0001r\u0003r\u0477\br\u0001r\u0004r\u047a\br\u000br\fr\u047b"+ - "\u0003r\u047e\br\u0001s\u0001s\u0004s\u0482\bs\u000bs\fs\u0483\u0001t"+ - "\u0001t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001"+ - "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ - "x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ - 
"{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ - "}\u0001}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f"+ - "\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081"+ - "\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001\u0082"+ - "\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083"+ - "\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084"+ - "\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085"+ - "\u0001\u0085\u0001\u0086\u0001\u0086\u0001\u0087\u0004\u0087\u04d9\b\u0087"+ - "\u000b\u0087\f\u0087\u04da\u0001\u0087\u0001\u0087\u0003\u0087\u04df\b"+ - "\u0087\u0001\u0087\u0004\u0087\u04e2\b\u0087\u000b\u0087\f\u0087\u04e3"+ - "\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089"+ - "\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a"+ - "\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001\u008c"+ - "\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d"+ - "\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e"+ - "\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090"+ - "\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091"+ - "\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001\u0093"+ - "\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094"+ - "\u0001\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095"+ - "\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097"+ - "\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001\u0098"+ - "\u0001\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u009a"+ - "\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b"+ - "\u0001\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c"+ - "\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e"+ - "\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f"+ - "\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a1"+ - "\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2"+ - "\u0001\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3"+ - "\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5"+ - "\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001\u00a6"+ - "\u0001\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0004\u00a8"+ - "\u0575\b\u00a8\u000b\u00a8\f\u00a8\u0576\u0001\u00a9\u0001\u00a9\u0001"+ - "\u00a9\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ - "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ - "\u00ac\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ - "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ - "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ - "\u00b0\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ - "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ - "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ - "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ - "\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001"+ - 
"\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ - "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ - "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ - "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ - "\u00bd\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ - "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ - "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ - "\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ - "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ - "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ - "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ - "\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ - "\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001"+ - "\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001"+ - "\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001"+ - "\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ - "\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001"+ - "\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001"+ - "\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001"+ - "\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ - "\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001"+ - "\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001"+ - "\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ - "\u00d5\u0001\u00d5\u0002\u02ad\u02f4\u0000\u00d6\u0010\u0001\u0012\u0002"+ - "\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b "+ - "\t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u00124\u00136\u00148"+ - "\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001cH\u001dJ\u001e"+ - "L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u0000"+ - "`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u0088"+ - "3\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c"+ - "=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac\u0000\u00ae\u0000"+ - "\u00b0E\u00b2F\u00b4G\u00b6H\u00b8\u0000\u00baI\u00bcJ\u00beK\u00c0L\u00c2"+ - "\u0000\u00c4\u0000\u00c6M\u00c8N\u00caO\u00cc\u0000\u00ce\u0000\u00d0"+ - "\u0000\u00d2\u0000\u00d4\u0000\u00d6\u0000\u00d8P\u00da\u0000\u00dcQ\u00de"+ - "\u0000\u00e0\u0000\u00e2R\u00e4S\u00e6T\u00e8\u0000\u00ea\u0000\u00ec"+ - "\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4\u0000\u00f6U\u00f8V\u00fa"+ - "W\u00fcX\u00fe\u0000\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108"+ - "\u0000\u010aY\u010c\u0000\u010eZ\u0110[\u0112\\\u0114\u0000\u0116\u0000"+ - "\u0118]\u011a^\u011c\u0000\u011e_\u0120\u0000\u0122`\u0124a\u0126b\u0128"+ - "\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134"+ - "\u0000\u0136\u0000\u0138\u0000\u013ac\u013cd\u013ee\u0140\u0000\u0142"+ - "\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a\u0000\u014cf\u014eg\u0150"+ - "h\u0152\u0000\u0154i\u0156j\u0158k\u015al\u015c\u0000\u015e\u0000\u0160"+ - "m\u0162n\u0164o\u0166p\u0168\u0000\u016a\u0000\u016c\u0000\u016e\u0000"+ - 
"\u0170\u0000\u0172\u0000\u0174\u0000\u0176q\u0178r\u017as\u017c\u0000"+ - "\u017e\u0000\u0180\u0000\u0182\u0000\u0184t\u0186u\u0188v\u018a\u0000"+ - "\u018c\u0000\u018e\u0000\u0190\u0000\u0192w\u0194\u0000\u0196\u0000\u0198"+ - "x\u019ay\u019cz\u019e\u0000\u01a0\u0000\u01a2\u0000\u01a4{\u01a6|\u01a8"+ - "}\u01aa\u0000\u01ac\u0000\u01ae~\u01b0\u007f\u01b2\u0080\u01b4\u0000\u01b6"+ - "\u0000\u01b8\u0000\u01ba\u0000\u0010\u0000\u0001\u0002\u0003\u0004\u0005"+ - "\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000DDdd\u0002\u0000"+ - "IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002\u0000TTtt\u0002"+ - "\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000NNnn\u0002\u0000"+ - "HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002\u0000XXxx\u0002"+ - "\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000KKkk\u0002\u0000"+ - "WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r\r //[[]]\u0002"+ - "\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\b\u0000"+ - "\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000++--\u0001"+ - "\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t\n\r\r \"\",,/"+ - "/::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,//::<<>?\\\\||\u0663"+ - "\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001\u0000\u0000\u0000"+ - "\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001\u0000\u0000\u0000"+ - "\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001\u0000\u0000\u0000"+ - "\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001\u0000\u0000\u0000"+ - "\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000\u0000\u0000\u0000"+ - "$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000\u0000(\u0001"+ - "\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,\u0001\u0000\u0000"+ - "\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000\u0000\u0000\u0000"+ - "2\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000\u00006\u0001"+ - "\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:\u0001\u0000\u0000"+ - "\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000\u0000\u0000\u0000"+ - "@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000\u0000D\u0001"+ - "\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H\u0001\u0000\u0000"+ - "\u0000\u0001J\u0001\u0000\u0000\u0000\u0001`\u0001\u0000\u0000\u0000\u0001"+ - "b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000\u0001f\u0001"+ - "\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j\u0001\u0000\u0000"+ - "\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000\u0000\u0000\u0001"+ - "p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000\u0001t\u0001"+ - "\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x\u0001\u0000\u0000"+ - "\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000\u0000\u0000\u0001"+ - "~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000\u0000\u0001\u0082"+ - "\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000\u0000\u0001\u0086"+ - "\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000\u0000\u0001\u008a"+ - "\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000\u0000\u0001\u008e"+ - "\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000\u0000\u0001\u0092"+ - "\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000\u0000\u0001\u0096"+ - "\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000\u0000\u0001\u009a"+ - "\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000\u0000\u0001\u009e"+ - "\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000\u0000\u0001\u00a2"+ - 
"\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000\u0000\u0001\u00a6"+ - "\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa"+ - "\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000\u0000\u0001\u00ae"+ - "\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000\u0000\u0001\u00b2"+ - "\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000\u0000\u0001\u00b6"+ - "\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000\u0000\u0001\u00bc"+ - "\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000\u0000\u0001\u00c0"+ - "\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000\u0000\u0002\u00c4"+ - "\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000\u0000\u0002\u00c8"+ - "\u0001\u0000\u0000\u0000\u0002\u00ca\u0001\u0000\u0000\u0000\u0003\u00cc"+ - "\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000\u0000\u0003\u00d0"+ - "\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000\u0000\u0003\u00d4"+ - "\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000\u0000\u0003\u00d8"+ - "\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000\u0000\u0003\u00de"+ - "\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000\u0000\u0003\u00e2"+ - "\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000\u0000\u0003\u00e6"+ - "\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000\u0000\u0004\u00ea"+ - "\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000\u0000\u0004\u00ee"+ - "\u0001\u0000\u0000\u0000\u0004\u00f0\u0001\u0000\u0000\u0000\u0004\u00f6"+ - "\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000\u0000\u0004\u00fa"+ - "\u0001\u0000\u0000\u0000\u0004\u00fc\u0001\u0000\u0000\u0000\u0005\u00fe"+ - "\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000\u0000\u0005\u0102"+ - "\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000\u0000\u0005\u0106"+ - "\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000\u0000\u0005\u010a"+ - "\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000\u0000\u0005\u010e"+ - "\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000\u0000\u0005\u0112"+ - "\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000\u0000\u0006\u0116"+ - "\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000\u0000\u0006\u011a"+ - "\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000\u0000\u0006\u0120"+ - "\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000\u0000\u0006\u0124"+ - "\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000\u0000\u0007\u0128"+ - "\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000\u0000\u0007\u012c"+ - "\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000\u0000\u0007\u0130"+ - "\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000\u0000\u0007\u0134"+ - "\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000\u0000\u0007\u0138"+ - "\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000\u0000\u0007\u013c"+ - "\u0001\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000\u0000\b\u0140\u0001"+ - "\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144\u0001\u0000"+ - "\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001\u0000\u0000"+ - "\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000\u0000\u0000"+ - "\b\u014e\u0001\u0000\u0000\u0000\b\u0150\u0001\u0000\u0000\u0000\t\u0152"+ - "\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000\t\u0156\u0001"+ - "\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\t\u015a\u0001\u0000"+ - "\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001\u0000\u0000"+ - "\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000\u0000\u0000"+ - 
"\n\u0164\u0001\u0000\u0000\u0000\n\u0166\u0001\u0000\u0000\u0000\u000b"+ - "\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000\u0000\u000b"+ - "\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000\u0000\u000b"+ - "\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000\u0000\u000b"+ - "\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000\u0000\u000b"+ - "\u0178\u0001\u0000\u0000\u0000\u000b\u017a\u0001\u0000\u0000\u0000\f\u017c"+ - "\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180\u0001"+ - "\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001\u0000"+ - "\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\f\u0188\u0001\u0000\u0000"+ - "\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000\u0000"+ - "\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000\r\u0192"+ - "\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196\u0001"+ - "\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001\u0000"+ - "\u0000\u0000\r\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001\u0000\u0000"+ - "\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001\u0000\u0000"+ - "\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001\u0000\u0000"+ - "\u0000\u000e\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001\u0000\u0000"+ - "\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001\u0000\u0000"+ - "\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001\u0000\u0000"+ - "\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001\u0000\u0000"+ - "\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u000f\u01ba\u0001\u0000\u0000"+ - "\u0000\u0010\u01bc\u0001\u0000\u0000\u0000\u0012\u01c6\u0001\u0000\u0000"+ - "\u0000\u0014\u01cd\u0001\u0000\u0000\u0000\u0016\u01d6\u0001\u0000\u0000"+ - "\u0000\u0018\u01dd\u0001\u0000\u0000\u0000\u001a\u01e7\u0001\u0000\u0000"+ - "\u0000\u001c\u01ee\u0001\u0000\u0000\u0000\u001e\u01f5\u0001\u0000\u0000"+ - "\u0000 \u01fc\u0001\u0000\u0000\u0000\"\u0204\u0001\u0000\u0000\u0000"+ - "$\u0210\u0001\u0000\u0000\u0000&\u0219\u0001\u0000\u0000\u0000(\u021f"+ - "\u0001\u0000\u0000\u0000*\u0226\u0001\u0000\u0000\u0000,\u022d\u0001\u0000"+ - "\u0000\u0000.\u0235\u0001\u0000\u0000\u00000\u023d\u0001\u0000\u0000\u0000"+ - "2\u024c\u0001\u0000\u0000\u00004\u0258\u0001\u0000\u0000\u00006\u0263"+ - "\u0001\u0000\u0000\u00008\u026b\u0001\u0000\u0000\u0000:\u0273\u0001\u0000"+ - "\u0000\u0000<\u027b\u0001\u0000\u0000\u0000>\u0284\u0001\u0000\u0000\u0000"+ - "@\u028f\u0001\u0000\u0000\u0000B\u0295\u0001\u0000\u0000\u0000D\u02a6"+ - "\u0001\u0000\u0000\u0000F\u02b6\u0001\u0000\u0000\u0000H\u02bc\u0001\u0000"+ - "\u0000\u0000J\u02be\u0001\u0000\u0000\u0000L\u02c2\u0001\u0000\u0000\u0000"+ - "N\u02c4\u0001\u0000\u0000\u0000P\u02c6\u0001\u0000\u0000\u0000R\u02c9"+ - "\u0001\u0000\u0000\u0000T\u02cb\u0001\u0000\u0000\u0000V\u02d4\u0001\u0000"+ - "\u0000\u0000X\u02d6\u0001\u0000\u0000\u0000Z\u02db\u0001\u0000\u0000\u0000"+ - "\\\u02dd\u0001\u0000\u0000\u0000^\u02e2\u0001\u0000\u0000\u0000`\u0301"+ - "\u0001\u0000\u0000\u0000b\u0304\u0001\u0000\u0000\u0000d\u0332\u0001\u0000"+ - "\u0000\u0000f\u0334\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000"+ - "j\u033b\u0001\u0000\u0000\u0000l\u033f\u0001\u0000\u0000\u0000n\u0341"+ - "\u0001\u0000\u0000\u0000p\u0344\u0001\u0000\u0000\u0000r\u0346\u0001\u0000"+ - "\u0000\u0000t\u034b\u0001\u0000\u0000\u0000v\u034d\u0001\u0000\u0000\u0000"+ - "x\u0353\u0001\u0000\u0000\u0000z\u0359\u0001\u0000\u0000\u0000|\u035c"+ - 
"\u0001\u0000\u0000\u0000~\u035f\u0001\u0000\u0000\u0000\u0080\u0364\u0001"+ - "\u0000\u0000\u0000\u0082\u0369\u0001\u0000\u0000\u0000\u0084\u036b\u0001"+ - "\u0000\u0000\u0000\u0086\u036f\u0001\u0000\u0000\u0000\u0088\u0374\u0001"+ - "\u0000\u0000\u0000\u008a\u037a\u0001\u0000\u0000\u0000\u008c\u037d\u0001"+ - "\u0000\u0000\u0000\u008e\u037f\u0001\u0000\u0000\u0000\u0090\u0385\u0001"+ - "\u0000\u0000\u0000\u0092\u0387\u0001\u0000\u0000\u0000\u0094\u038c\u0001"+ - "\u0000\u0000\u0000\u0096\u038f\u0001\u0000\u0000\u0000\u0098\u0392\u0001"+ - "\u0000\u0000\u0000\u009a\u0395\u0001\u0000\u0000\u0000\u009c\u0397\u0001"+ - "\u0000\u0000\u0000\u009e\u039a\u0001\u0000\u0000\u0000\u00a0\u039c\u0001"+ - "\u0000\u0000\u0000\u00a2\u039f\u0001\u0000\u0000\u0000\u00a4\u03a1\u0001"+ - "\u0000\u0000\u0000\u00a6\u03a3\u0001\u0000\u0000\u0000\u00a8\u03a5\u0001"+ - "\u0000\u0000\u0000\u00aa\u03a7\u0001\u0000\u0000\u0000\u00ac\u03a9\u0001"+ - "\u0000\u0000\u0000\u00ae\u03ae\u0001\u0000\u0000\u0000\u00b0\u03c3\u0001"+ - "\u0000\u0000\u0000\u00b2\u03c5\u0001\u0000\u0000\u0000\u00b4\u03ca\u0001"+ - "\u0000\u0000\u0000\u00b6\u03df\u0001\u0000\u0000\u0000\u00b8\u03e1\u0001"+ - "\u0000\u0000\u0000\u00ba\u03e9\u0001\u0000\u0000\u0000\u00bc\u03eb\u0001"+ - "\u0000\u0000\u0000\u00be\u03ef\u0001\u0000\u0000\u0000\u00c0\u03f3\u0001"+ - "\u0000\u0000\u0000\u00c2\u03f7\u0001\u0000\u0000\u0000\u00c4\u03fc\u0001"+ - "\u0000\u0000\u0000\u00c6\u0401\u0001\u0000\u0000\u0000\u00c8\u0405\u0001"+ - "\u0000\u0000\u0000\u00ca\u0409\u0001\u0000\u0000\u0000\u00cc\u040d\u0001"+ - "\u0000\u0000\u0000\u00ce\u0412\u0001\u0000\u0000\u0000\u00d0\u0416\u0001"+ - "\u0000\u0000\u0000\u00d2\u041a\u0001\u0000\u0000\u0000\u00d4\u041e\u0001"+ - "\u0000\u0000\u0000\u00d6\u0422\u0001\u0000\u0000\u0000\u00d8\u0426\u0001"+ - "\u0000\u0000\u0000\u00da\u0432\u0001\u0000\u0000\u0000\u00dc\u0435\u0001"+ - "\u0000\u0000\u0000\u00de\u0439\u0001\u0000\u0000\u0000\u00e0\u043d\u0001"+ - "\u0000\u0000\u0000\u00e2\u0441\u0001\u0000\u0000\u0000\u00e4\u0445\u0001"+ - "\u0000\u0000\u0000\u00e6\u0449\u0001\u0000\u0000\u0000\u00e8\u044d\u0001"+ - "\u0000\u0000\u0000\u00ea\u0452\u0001\u0000\u0000\u0000\u00ec\u0456\u0001"+ - "\u0000\u0000\u0000\u00ee\u045a\u0001\u0000\u0000\u0000\u00f0\u045f\u0001"+ - "\u0000\u0000\u0000\u00f2\u0468\u0001\u0000\u0000\u0000\u00f4\u047d\u0001"+ - "\u0000\u0000\u0000\u00f6\u0481\u0001\u0000\u0000\u0000\u00f8\u0485\u0001"+ - "\u0000\u0000\u0000\u00fa\u0489\u0001\u0000\u0000\u0000\u00fc\u048d\u0001"+ - "\u0000\u0000\u0000\u00fe\u0491\u0001\u0000\u0000\u0000\u0100\u0496\u0001"+ - "\u0000\u0000\u0000\u0102\u049a\u0001\u0000\u0000\u0000\u0104\u049e\u0001"+ - "\u0000\u0000\u0000\u0106\u04a2\u0001\u0000\u0000\u0000\u0108\u04a7\u0001"+ - "\u0000\u0000\u0000\u010a\u04ac\u0001\u0000\u0000\u0000\u010c\u04af\u0001"+ - "\u0000\u0000\u0000\u010e\u04b3\u0001\u0000\u0000\u0000\u0110\u04b7\u0001"+ - "\u0000\u0000\u0000\u0112\u04bb\u0001\u0000\u0000\u0000\u0114\u04bf\u0001"+ - "\u0000\u0000\u0000\u0116\u04c4\u0001\u0000\u0000\u0000\u0118\u04c9\u0001"+ - "\u0000\u0000\u0000\u011a\u04ce\u0001\u0000\u0000\u0000\u011c\u04d5\u0001"+ - "\u0000\u0000\u0000\u011e\u04de\u0001\u0000\u0000\u0000\u0120\u04e5\u0001"+ - "\u0000\u0000\u0000\u0122\u04e9\u0001\u0000\u0000\u0000\u0124\u04ed\u0001"+ - "\u0000\u0000\u0000\u0126\u04f1\u0001\u0000\u0000\u0000\u0128\u04f5\u0001"+ - "\u0000\u0000\u0000\u012a\u04fb\u0001\u0000\u0000\u0000\u012c\u04ff\u0001"+ - "\u0000\u0000\u0000\u012e\u0503\u0001\u0000\u0000\u0000\u0130\u0507\u0001"+ - 
"\u0000\u0000\u0000\u0132\u050b\u0001\u0000\u0000\u0000\u0134\u050f\u0001"+ - "\u0000\u0000\u0000\u0136\u0513\u0001\u0000\u0000\u0000\u0138\u0518\u0001"+ - "\u0000\u0000\u0000\u013a\u051d\u0001\u0000\u0000\u0000\u013c\u0521\u0001"+ - "\u0000\u0000\u0000\u013e\u0525\u0001\u0000\u0000\u0000\u0140\u0529\u0001"+ - "\u0000\u0000\u0000\u0142\u052e\u0001\u0000\u0000\u0000\u0144\u0532\u0001"+ - "\u0000\u0000\u0000\u0146\u0537\u0001\u0000\u0000\u0000\u0148\u053c\u0001"+ - "\u0000\u0000\u0000\u014a\u0540\u0001\u0000\u0000\u0000\u014c\u0544\u0001"+ - "\u0000\u0000\u0000\u014e\u0548\u0001\u0000\u0000\u0000\u0150\u054c\u0001"+ - "\u0000\u0000\u0000\u0152\u0550\u0001\u0000\u0000\u0000\u0154\u0555\u0001"+ - "\u0000\u0000\u0000\u0156\u055a\u0001\u0000\u0000\u0000\u0158\u055e\u0001"+ - "\u0000\u0000\u0000\u015a\u0562\u0001\u0000\u0000\u0000\u015c\u0566\u0001"+ - "\u0000\u0000\u0000\u015e\u056b\u0001\u0000\u0000\u0000\u0160\u0574\u0001"+ - "\u0000\u0000\u0000\u0162\u0578\u0001\u0000\u0000\u0000\u0164\u057c\u0001"+ - "\u0000\u0000\u0000\u0166\u0580\u0001\u0000\u0000\u0000\u0168\u0584\u0001"+ - "\u0000\u0000\u0000\u016a\u0589\u0001\u0000\u0000\u0000\u016c\u058d\u0001"+ - "\u0000\u0000\u0000\u016e\u0591\u0001\u0000\u0000\u0000\u0170\u0595\u0001"+ - "\u0000\u0000\u0000\u0172\u059a\u0001\u0000\u0000\u0000\u0174\u059e\u0001"+ - "\u0000\u0000\u0000\u0176\u05a2\u0001\u0000\u0000\u0000\u0178\u05a6\u0001"+ - "\u0000\u0000\u0000\u017a\u05aa\u0001\u0000\u0000\u0000\u017c\u05ae\u0001"+ - "\u0000\u0000\u0000\u017e\u05b4\u0001\u0000\u0000\u0000\u0180\u05b8\u0001"+ - "\u0000\u0000\u0000\u0182\u05bc\u0001\u0000\u0000\u0000\u0184\u05c0\u0001"+ - "\u0000\u0000\u0000\u0186\u05c4\u0001\u0000\u0000\u0000\u0188\u05c8\u0001"+ - "\u0000\u0000\u0000\u018a\u05cc\u0001\u0000\u0000\u0000\u018c\u05d1\u0001"+ - "\u0000\u0000\u0000\u018e\u05d5\u0001\u0000\u0000\u0000\u0190\u05d9\u0001"+ - "\u0000\u0000\u0000\u0192\u05df\u0001\u0000\u0000\u0000\u0194\u05e8\u0001"+ - "\u0000\u0000\u0000\u0196\u05ec\u0001\u0000\u0000\u0000\u0198\u05f0\u0001"+ - "\u0000\u0000\u0000\u019a\u05f4\u0001\u0000\u0000\u0000\u019c\u05f8\u0001"+ - "\u0000\u0000\u0000\u019e\u05fc\u0001\u0000\u0000\u0000\u01a0\u0601\u0001"+ - "\u0000\u0000\u0000\u01a2\u0607\u0001\u0000\u0000\u0000\u01a4\u060d\u0001"+ - "\u0000\u0000\u0000\u01a6\u0611\u0001\u0000\u0000\u0000\u01a8\u0615\u0001"+ - "\u0000\u0000\u0000\u01aa\u0619\u0001\u0000\u0000\u0000\u01ac\u061f\u0001"+ - "\u0000\u0000\u0000\u01ae\u0625\u0001\u0000\u0000\u0000\u01b0\u0629\u0001"+ - "\u0000\u0000\u0000\u01b2\u062d\u0001\u0000\u0000\u0000\u01b4\u0631\u0001"+ - "\u0000\u0000\u0000\u01b6\u0637\u0001\u0000\u0000\u0000\u01b8\u063d\u0001"+ - "\u0000\u0000\u0000\u01ba\u0643\u0001\u0000\u0000\u0000\u01bc\u01bd\u0007"+ - "\u0000\u0000\u0000\u01bd\u01be\u0007\u0001\u0000\u0000\u01be\u01bf\u0007"+ - "\u0002\u0000\u0000\u01bf\u01c0\u0007\u0002\u0000\u0000\u01c0\u01c1\u0007"+ - "\u0003\u0000\u0000\u01c1\u01c2\u0007\u0004\u0000\u0000\u01c2\u01c3\u0007"+ - "\u0005\u0000\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c5\u0006"+ - "\u0000\u0000\u0000\u01c5\u0011\u0001\u0000\u0000\u0000\u01c6\u01c7\u0007"+ - "\u0000\u0000\u0000\u01c7\u01c8\u0007\u0006\u0000\u0000\u01c8\u01c9\u0007"+ - "\u0007\u0000\u0000\u01c9\u01ca\u0007\b\u0000\u0000\u01ca\u01cb\u0001\u0000"+ - "\u0000\u0000\u01cb\u01cc\u0006\u0001\u0001\u0000\u01cc\u0013\u0001\u0000"+ - "\u0000\u0000\u01cd\u01ce\u0007\u0003\u0000\u0000\u01ce\u01cf\u0007\t\u0000"+ - "\u0000\u01cf\u01d0\u0007\u0006\u0000\u0000\u01d0\u01d1\u0007\u0001\u0000"+ - 
"\u0000\u01d1\u01d2\u0007\u0004\u0000\u0000\u01d2\u01d3\u0007\n\u0000\u0000"+ - "\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4\u01d5\u0006\u0002\u0002\u0000"+ - "\u01d5\u0015\u0001\u0000\u0000\u0000\u01d6\u01d7\u0007\u0003\u0000\u0000"+ - "\u01d7\u01d8\u0007\u000b\u0000\u0000\u01d8\u01d9\u0007\f\u0000\u0000\u01d9"+ - "\u01da\u0007\r\u0000\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc"+ - "\u0006\u0003\u0000\u0000\u01dc\u0017\u0001\u0000\u0000\u0000\u01dd\u01de"+ - "\u0007\u0003\u0000\u0000\u01de\u01df\u0007\u000e\u0000\u0000\u01df\u01e0"+ - "\u0007\b\u0000\u0000\u01e0\u01e1\u0007\r\u0000\u0000\u01e1\u01e2\u0007"+ - "\f\u0000\u0000\u01e2\u01e3\u0007\u0001\u0000\u0000\u01e3\u01e4\u0007\t"+ - "\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e6\u0006\u0004"+ - "\u0003\u0000\u01e6\u0019\u0001\u0000\u0000\u0000\u01e7\u01e8\u0007\u000f"+ - "\u0000\u0000\u01e8\u01e9\u0007\u0006\u0000\u0000\u01e9\u01ea\u0007\u0007"+ - "\u0000\u0000\u01ea\u01eb\u0007\u0010\u0000\u0000\u01eb\u01ec\u0001\u0000"+ - "\u0000\u0000\u01ec\u01ed\u0006\u0005\u0004\u0000\u01ed\u001b\u0001\u0000"+ - "\u0000\u0000\u01ee\u01ef\u0007\u0011\u0000\u0000\u01ef\u01f0\u0007\u0006"+ - "\u0000\u0000\u01f0\u01f1\u0007\u0007\u0000\u0000\u01f1\u01f2\u0007\u0012"+ - "\u0000\u0000\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u01f4\u0006\u0006"+ - "\u0000\u0000\u01f4\u001d\u0001\u0000\u0000\u0000\u01f5\u01f6\u0007\u0012"+ - "\u0000\u0000\u01f6\u01f7\u0007\u0003\u0000\u0000\u01f7\u01f8\u0007\u0003"+ - "\u0000\u0000\u01f8\u01f9\u0007\b\u0000\u0000\u01f9\u01fa\u0001\u0000\u0000"+ - "\u0000\u01fa\u01fb\u0006\u0007\u0001\u0000\u01fb\u001f\u0001\u0000\u0000"+ - "\u0000\u01fc\u01fd\u0007\r\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000\u0000"+ - "\u01fe\u01ff\u0007\u0010\u0000\u0000\u01ff\u0200\u0007\u0001\u0000\u0000"+ - "\u0200\u0201\u0007\u0005\u0000\u0000\u0201\u0202\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0006\b\u0000\u0000\u0203!\u0001\u0000\u0000\u0000\u0204"+ - "\u0205\u0007\u0010\u0000\u0000\u0205\u0206\u0007\u000b\u0000\u0000\u0206"+ - "\u0207\u0005_\u0000\u0000\u0207\u0208\u0007\u0003\u0000\u0000\u0208\u0209"+ - "\u0007\u000e\u0000\u0000\u0209\u020a\u0007\b\u0000\u0000\u020a\u020b\u0007"+ - "\f\u0000\u0000\u020b\u020c\u0007\t\u0000\u0000\u020c\u020d\u0007\u0000"+ - "\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000\u020e\u020f\u0006\t\u0005"+ - "\u0000\u020f#\u0001\u0000\u0000\u0000\u0210\u0211\u0007\u0006\u0000\u0000"+ - "\u0211\u0212\u0007\u0003\u0000\u0000\u0212\u0213\u0007\t\u0000\u0000\u0213"+ - "\u0214\u0007\f\u0000\u0000\u0214\u0215\u0007\u0010\u0000\u0000\u0215\u0216"+ - "\u0007\u0003\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000\u0217\u0218"+ - "\u0006\n\u0006\u0000\u0218%\u0001\u0000\u0000\u0000\u0219\u021a\u0007"+ - "\u0006\u0000\u0000\u021a\u021b\u0007\u0007\u0000\u0000\u021b\u021c\u0007"+ - "\u0013\u0000\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d\u021e\u0006"+ - "\u000b\u0000\u0000\u021e\'\u0001\u0000\u0000\u0000\u021f\u0220\u0007\u0002"+ - "\u0000\u0000\u0220\u0221\u0007\n\u0000\u0000\u0221\u0222\u0007\u0007\u0000"+ - "\u0000\u0222\u0223\u0007\u0013\u0000\u0000\u0223\u0224\u0001\u0000\u0000"+ - "\u0000\u0224\u0225\u0006\f\u0007\u0000\u0225)\u0001\u0000\u0000\u0000"+ - "\u0226\u0227\u0007\u0002\u0000\u0000\u0227\u0228\u0007\u0007\u0000\u0000"+ - "\u0228\u0229\u0007\u0006\u0000\u0000\u0229\u022a\u0007\u0005\u0000\u0000"+ - "\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0006\r\u0000\u0000\u022c"+ - "+\u0001\u0000\u0000\u0000\u022d\u022e\u0007\u0002\u0000\u0000\u022e\u022f"+ - 
"\u0007\u0005\u0000\u0000\u022f\u0230\u0007\f\u0000\u0000\u0230\u0231\u0007"+ - "\u0005\u0000\u0000\u0231\u0232\u0007\u0002\u0000\u0000\u0232\u0233\u0001"+ - "\u0000\u0000\u0000\u0233\u0234\u0006\u000e\u0000\u0000\u0234-\u0001\u0000"+ - "\u0000\u0000\u0235\u0236\u0007\u0013\u0000\u0000\u0236\u0237\u0007\n\u0000"+ - "\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0007\u0006\u0000"+ - "\u0000\u0239\u023a\u0007\u0003\u0000\u0000\u023a\u023b\u0001\u0000\u0000"+ - "\u0000\u023b\u023c\u0006\u000f\u0000\u0000\u023c/\u0001\u0000\u0000\u0000"+ - "\u023d\u023e\u0004\u0010\u0000\u0000\u023e\u023f\u0007\u0001\u0000\u0000"+ - "\u023f\u0240\u0007\t\u0000\u0000\u0240\u0241\u0007\r\u0000\u0000\u0241"+ - "\u0242\u0007\u0001\u0000\u0000\u0242\u0243\u0007\t\u0000\u0000\u0243\u0244"+ - "\u0007\u0003\u0000\u0000\u0244\u0245\u0007\u0002\u0000\u0000\u0245\u0246"+ - "\u0007\u0005\u0000\u0000\u0246\u0247\u0007\f\u0000\u0000\u0247\u0248\u0007"+ - "\u0005\u0000\u0000\u0248\u0249\u0007\u0002\u0000\u0000\u0249\u024a\u0001"+ - "\u0000\u0000\u0000\u024a\u024b\u0006\u0010\u0000\u0000\u024b1\u0001\u0000"+ - "\u0000\u0000\u024c\u024d\u0004\u0011\u0001\u0000\u024d\u024e\u0007\r\u0000"+ - "\u0000\u024e\u024f\u0007\u0007\u0000\u0000\u024f\u0250\u0007\u0007\u0000"+ - "\u0000\u0250\u0251\u0007\u0012\u0000\u0000\u0251\u0252\u0007\u0014\u0000"+ - "\u0000\u0252\u0253\u0007\b\u0000\u0000\u0253\u0254\u0005_\u0000\u0000"+ - "\u0254\u0255\u0005\u8001\uf414\u0000\u0000\u0255\u0256\u0001\u0000\u0000"+ - "\u0000\u0256\u0257\u0006\u0011\b\u0000\u02573\u0001\u0000\u0000\u0000"+ - "\u0258\u0259\u0004\u0012\u0002\u0000\u0259\u025a\u0007\u0010\u0000\u0000"+ - "\u025a\u025b\u0007\u0003\u0000\u0000\u025b\u025c\u0007\u0005\u0000\u0000"+ - "\u025c\u025d\u0007\u0006\u0000\u0000\u025d\u025e\u0007\u0001\u0000\u0000"+ - "\u025e\u025f\u0007\u0004\u0000\u0000\u025f\u0260\u0007\u0002\u0000\u0000"+ - "\u0260\u0261\u0001\u0000\u0000\u0000\u0261\u0262\u0006\u0012\t\u0000\u0262"+ - "5\u0001\u0000\u0000\u0000\u0263\u0264\u0004\u0013\u0003\u0000\u0264\u0265"+ - "\u0007\u0015\u0000\u0000\u0265\u0266\u0007\u0007\u0000\u0000\u0266\u0267"+ - "\u0007\u0001\u0000\u0000\u0267\u0268\u0007\t\u0000\u0000\u0268\u0269\u0001"+ - "\u0000\u0000\u0000\u0269\u026a\u0006\u0013\n\u0000\u026a7\u0001\u0000"+ - "\u0000\u0000\u026b\u026c\u0004\u0014\u0004\u0000\u026c\u026d\u0007\u000f"+ - "\u0000\u0000\u026d\u026e\u0007\u0014\u0000\u0000\u026e\u026f\u0007\r\u0000"+ - "\u0000\u026f\u0270\u0007\r\u0000\u0000\u0270\u0271\u0001\u0000\u0000\u0000"+ - "\u0271\u0272\u0006\u0014\n\u0000\u02729\u0001\u0000\u0000\u0000\u0273"+ - "\u0274\u0004\u0015\u0005\u0000\u0274\u0275\u0007\r\u0000\u0000\u0275\u0276"+ - "\u0007\u0003\u0000\u0000\u0276\u0277\u0007\u000f\u0000\u0000\u0277\u0278"+ - "\u0007\u0005\u0000\u0000\u0278\u0279\u0001\u0000\u0000\u0000\u0279\u027a"+ - "\u0006\u0015\n\u0000\u027a;\u0001\u0000\u0000\u0000\u027b\u027c\u0004"+ - "\u0016\u0006\u0000\u027c\u027d\u0007\u0006\u0000\u0000\u027d\u027e\u0007"+ - "\u0001\u0000\u0000\u027e\u027f\u0007\u0011\u0000\u0000\u027f\u0280\u0007"+ - "\n\u0000\u0000\u0280\u0281\u0007\u0005\u0000\u0000\u0281\u0282\u0001\u0000"+ - "\u0000\u0000\u0282\u0283\u0006\u0016\n\u0000\u0283=\u0001\u0000\u0000"+ - "\u0000\u0284\u0285\u0004\u0017\u0007\u0000\u0285\u0286\u0007\r\u0000\u0000"+ - "\u0286\u0287\u0007\u0007\u0000\u0000\u0287\u0288\u0007\u0007\u0000\u0000"+ - "\u0288\u0289\u0007\u0012\u0000\u0000\u0289\u028a\u0007\u0014\u0000\u0000"+ - "\u028a\u028b\u0007\b\u0000\u0000\u028b\u028c\u0001\u0000\u0000\u0000\u028c"+ - 
"\u028d\u0006\u0017\n\u0000\u028d?\u0001\u0000\u0000\u0000\u028e\u0290"+ - "\b\u0016\u0000\u0000\u028f\u028e\u0001\u0000\u0000\u0000\u0290\u0291\u0001"+ - "\u0000\u0000\u0000\u0291\u028f\u0001\u0000\u0000\u0000\u0291\u0292\u0001"+ - "\u0000\u0000\u0000\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0294\u0006"+ - "\u0018\u0000\u0000\u0294A\u0001\u0000\u0000\u0000\u0295\u0296\u0005/\u0000"+ - "\u0000\u0296\u0297\u0005/\u0000\u0000\u0297\u029b\u0001\u0000\u0000\u0000"+ - "\u0298\u029a\b\u0017\u0000\u0000\u0299\u0298\u0001\u0000\u0000\u0000\u029a"+ - "\u029d\u0001\u0000\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029b"+ - "\u029c\u0001\u0000\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d"+ - "\u029b\u0001\u0000\u0000\u0000\u029e\u02a0\u0005\r\u0000\u0000\u029f\u029e"+ - "\u0001\u0000\u0000\u0000\u029f\u02a0\u0001\u0000\u0000\u0000\u02a0\u02a2"+ - "\u0001\u0000\u0000\u0000\u02a1\u02a3\u0005\n\u0000\u0000\u02a2\u02a1\u0001"+ - "\u0000\u0000\u0000\u02a2\u02a3\u0001\u0000\u0000\u0000\u02a3\u02a4\u0001"+ - "\u0000\u0000\u0000\u02a4\u02a5\u0006\u0019\u000b\u0000\u02a5C\u0001\u0000"+ - "\u0000\u0000\u02a6\u02a7\u0005/\u0000\u0000\u02a7\u02a8\u0005*\u0000\u0000"+ - "\u02a8\u02ad\u0001\u0000\u0000\u0000\u02a9\u02ac\u0003D\u001a\u0000\u02aa"+ - "\u02ac\t\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000\u0000\u0000\u02ab\u02aa"+ - "\u0001\u0000\u0000\u0000\u02ac\u02af\u0001\u0000\u0000\u0000\u02ad\u02ae"+ - "\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000\u0000\u0000\u02ae\u02b0"+ - "\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000\u0000\u0000\u02b0\u02b1"+ - "\u0005*\u0000\u0000\u02b1\u02b2\u0005/\u0000\u0000\u02b2\u02b3\u0001\u0000"+ - "\u0000\u0000\u02b3\u02b4\u0006\u001a\u000b\u0000\u02b4E\u0001\u0000\u0000"+ - "\u0000\u02b5\u02b7\u0007\u0018\u0000\u0000\u02b6\u02b5\u0001\u0000\u0000"+ - "\u0000\u02b7\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b6\u0001\u0000\u0000"+ - "\u0000\u02b8\u02b9\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000"+ - "\u0000\u02ba\u02bb\u0006\u001b\u000b\u0000\u02bbG\u0001\u0000\u0000\u0000"+ - "\u02bc\u02bd\u0005:\u0000\u0000\u02bdI\u0001\u0000\u0000\u0000\u02be\u02bf"+ - "\u0005|\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1\u0006"+ - "\u001d\f\u0000\u02c1K\u0001\u0000\u0000\u0000\u02c2\u02c3\u0007\u0019"+ - "\u0000\u0000\u02c3M\u0001\u0000\u0000\u0000\u02c4\u02c5\u0007\u001a\u0000"+ - "\u0000\u02c5O\u0001\u0000\u0000\u0000\u02c6\u02c7\u0005\\\u0000\u0000"+ - "\u02c7\u02c8\u0007\u001b\u0000\u0000\u02c8Q\u0001\u0000\u0000\u0000\u02c9"+ - "\u02ca\b\u001c\u0000\u0000\u02caS\u0001\u0000\u0000\u0000\u02cb\u02cd"+ - "\u0007\u0003\u0000\u0000\u02cc\u02ce\u0007\u001d\u0000\u0000\u02cd\u02cc"+ - "\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000\u0000\u0000\u02ce\u02d0"+ - "\u0001\u0000\u0000\u0000\u02cf\u02d1\u0003L\u001e\u0000\u02d0\u02cf\u0001"+ - "\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000\u0000\u0000\u02d2\u02d0\u0001"+ - "\u0000\u0000\u0000\u02d2\u02d3\u0001\u0000\u0000\u0000\u02d3U\u0001\u0000"+ - "\u0000\u0000\u02d4\u02d5\u0005@\u0000\u0000\u02d5W\u0001\u0000\u0000\u0000"+ - "\u02d6\u02d7\u0005`\u0000\u0000\u02d7Y\u0001\u0000\u0000\u0000\u02d8\u02dc"+ - "\b\u001e\u0000\u0000\u02d9\u02da\u0005`\u0000\u0000\u02da\u02dc\u0005"+ - "`\u0000\u0000\u02db\u02d8\u0001\u0000\u0000\u0000\u02db\u02d9\u0001\u0000"+ - "\u0000\u0000\u02dc[\u0001\u0000\u0000\u0000\u02dd\u02de\u0005_\u0000\u0000"+ - "\u02de]\u0001\u0000\u0000\u0000\u02df\u02e3\u0003N\u001f\u0000\u02e0\u02e3"+ - "\u0003L\u001e\u0000\u02e1\u02e3\u0003\\&\u0000\u02e2\u02df\u0001\u0000"+ - 
"\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000\u0000\u02e2\u02e1\u0001\u0000"+ - "\u0000\u0000\u02e3_\u0001\u0000\u0000\u0000\u02e4\u02e9\u0005\"\u0000"+ - "\u0000\u02e5\u02e8\u0003P \u0000\u02e6\u02e8\u0003R!\u0000\u02e7\u02e5"+ - "\u0001\u0000\u0000\u0000\u02e7\u02e6\u0001\u0000\u0000\u0000\u02e8\u02eb"+ - "\u0001\u0000\u0000\u0000\u02e9\u02e7\u0001\u0000\u0000\u0000\u02e9\u02ea"+ - "\u0001\u0000\u0000\u0000\u02ea\u02ec\u0001\u0000\u0000\u0000\u02eb\u02e9"+ - "\u0001\u0000\u0000\u0000\u02ec\u0302\u0005\"\u0000\u0000\u02ed\u02ee\u0005"+ - "\"\u0000\u0000\u02ee\u02ef\u0005\"\u0000\u0000\u02ef\u02f0\u0005\"\u0000"+ - "\u0000\u02f0\u02f4\u0001\u0000\u0000\u0000\u02f1\u02f3\b\u0017\u0000\u0000"+ - "\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3\u02f6\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f4\u02f2\u0001\u0000\u0000\u0000"+ - "\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6\u02f4\u0001\u0000\u0000\u0000"+ - "\u02f7\u02f8\u0005\"\u0000\u0000\u02f8\u02f9\u0005\"\u0000\u0000\u02f9"+ - "\u02fa\u0005\"\u0000\u0000\u02fa\u02fc\u0001\u0000\u0000\u0000\u02fb\u02fd"+ - "\u0005\"\u0000\u0000\u02fc\u02fb\u0001\u0000\u0000\u0000\u02fc\u02fd\u0001"+ - "\u0000\u0000\u0000\u02fd\u02ff\u0001\u0000\u0000\u0000\u02fe\u0300\u0005"+ - "\"\u0000\u0000\u02ff\u02fe\u0001\u0000\u0000\u0000\u02ff\u0300\u0001\u0000"+ - "\u0000\u0000\u0300\u0302\u0001\u0000\u0000\u0000\u0301\u02e4\u0001\u0000"+ - "\u0000\u0000\u0301\u02ed\u0001\u0000\u0000\u0000\u0302a\u0001\u0000\u0000"+ - "\u0000\u0303\u0305\u0003L\u001e\u0000\u0304\u0303\u0001\u0000\u0000\u0000"+ - "\u0305\u0306\u0001\u0000\u0000\u0000\u0306\u0304\u0001\u0000\u0000\u0000"+ - "\u0306\u0307\u0001\u0000\u0000\u0000\u0307c\u0001\u0000\u0000\u0000\u0308"+ - "\u030a\u0003L\u001e\u0000\u0309\u0308\u0001\u0000\u0000\u0000\u030a\u030b"+ - "\u0001\u0000\u0000\u0000\u030b\u0309\u0001\u0000\u0000\u0000\u030b\u030c"+ - "\u0001\u0000\u0000\u0000\u030c\u030d\u0001\u0000\u0000\u0000\u030d\u0311"+ - "\u0003t2\u0000\u030e\u0310\u0003L\u001e\u0000\u030f\u030e\u0001\u0000"+ - "\u0000\u0000\u0310\u0313\u0001\u0000\u0000\u0000\u0311\u030f\u0001\u0000"+ - "\u0000\u0000\u0311\u0312\u0001\u0000\u0000\u0000\u0312\u0333\u0001\u0000"+ - "\u0000\u0000\u0313\u0311\u0001\u0000\u0000\u0000\u0314\u0316\u0003t2\u0000"+ - "\u0315\u0317\u0003L\u001e\u0000\u0316\u0315\u0001\u0000\u0000\u0000\u0317"+ - "\u0318\u0001\u0000\u0000\u0000\u0318\u0316\u0001\u0000\u0000\u0000\u0318"+ - "\u0319\u0001\u0000\u0000\u0000\u0319\u0333\u0001\u0000\u0000\u0000\u031a"+ - "\u031c\u0003L\u001e\u0000\u031b\u031a\u0001\u0000\u0000\u0000\u031c\u031d"+ - "\u0001\u0000\u0000\u0000\u031d\u031b\u0001\u0000\u0000\u0000\u031d\u031e"+ - "\u0001\u0000\u0000\u0000\u031e\u0326\u0001\u0000\u0000\u0000\u031f\u0323"+ - "\u0003t2\u0000\u0320\u0322\u0003L\u001e\u0000\u0321\u0320\u0001\u0000"+ - "\u0000\u0000\u0322\u0325\u0001\u0000\u0000\u0000\u0323\u0321\u0001\u0000"+ - "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0327\u0001\u0000"+ - "\u0000\u0000\u0325\u0323\u0001\u0000\u0000\u0000\u0326\u031f\u0001\u0000"+ - "\u0000\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327\u0328\u0001\u0000"+ - "\u0000\u0000\u0328\u0329\u0003T\"\u0000\u0329\u0333\u0001\u0000\u0000"+ - "\u0000\u032a\u032c\u0003t2\u0000\u032b\u032d\u0003L\u001e\u0000\u032c"+ - "\u032b\u0001\u0000\u0000\u0000\u032d\u032e\u0001\u0000\u0000\u0000\u032e"+ - "\u032c\u0001\u0000\u0000\u0000\u032e\u032f\u0001\u0000\u0000\u0000\u032f"+ - "\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0003T\"\u0000\u0331\u0333"+ - 
"\u0001\u0000\u0000\u0000\u0332\u0309\u0001\u0000\u0000\u0000\u0332\u0314"+ - "\u0001\u0000\u0000\u0000\u0332\u031b\u0001\u0000\u0000\u0000\u0332\u032a"+ - "\u0001\u0000\u0000\u0000\u0333e\u0001\u0000\u0000\u0000\u0334\u0335\u0007"+ - "\u001f\u0000\u0000\u0335\u0336\u0007 \u0000\u0000\u0336g\u0001\u0000\u0000"+ - "\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\t\u0000\u0000"+ - "\u0339\u033a\u0007\u0000\u0000\u0000\u033ai\u0001\u0000\u0000\u0000\u033b"+ - "\u033c\u0007\f\u0000\u0000\u033c\u033d\u0007\u0002\u0000\u0000\u033d\u033e"+ - "\u0007\u0004\u0000\u0000\u033ek\u0001\u0000\u0000\u0000\u033f\u0340\u0005"+ - "=\u0000\u0000\u0340m\u0001\u0000\u0000\u0000\u0341\u0342\u0005:\u0000"+ - "\u0000\u0342\u0343\u0005:\u0000\u0000\u0343o\u0001\u0000\u0000\u0000\u0344"+ - "\u0345\u0005,\u0000\u0000\u0345q\u0001\u0000\u0000\u0000\u0346\u0347\u0007"+ - "\u0000\u0000\u0000\u0347\u0348\u0007\u0003\u0000\u0000\u0348\u0349\u0007"+ - "\u0002\u0000\u0000\u0349\u034a\u0007\u0004\u0000\u0000\u034as\u0001\u0000"+ - "\u0000\u0000\u034b\u034c\u0005.\u0000\u0000\u034cu\u0001\u0000\u0000\u0000"+ - "\u034d\u034e\u0007\u000f\u0000\u0000\u034e\u034f\u0007\f\u0000\u0000\u034f"+ - "\u0350\u0007\r\u0000\u0000\u0350\u0351\u0007\u0002\u0000\u0000\u0351\u0352"+ - "\u0007\u0003\u0000\u0000\u0352w\u0001\u0000\u0000\u0000\u0353\u0354\u0007"+ - "\u000f\u0000\u0000\u0354\u0355\u0007\u0001\u0000\u0000\u0355\u0356\u0007"+ - "\u0006\u0000\u0000\u0356\u0357\u0007\u0002\u0000\u0000\u0357\u0358\u0007"+ - "\u0005\u0000\u0000\u0358y\u0001\u0000\u0000\u0000\u0359\u035a\u0007\u0001"+ - "\u0000\u0000\u035a\u035b\u0007\t\u0000\u0000\u035b{\u0001\u0000\u0000"+ - "\u0000\u035c\u035d\u0007\u0001\u0000\u0000\u035d\u035e\u0007\u0002\u0000"+ - "\u0000\u035e}\u0001\u0000\u0000\u0000\u035f\u0360\u0007\r\u0000\u0000"+ - "\u0360\u0361\u0007\f\u0000\u0000\u0361\u0362\u0007\u0002\u0000\u0000\u0362"+ - "\u0363\u0007\u0005\u0000\u0000\u0363\u007f\u0001\u0000\u0000\u0000\u0364"+ - "\u0365\u0007\r\u0000\u0000\u0365\u0366\u0007\u0001\u0000\u0000\u0366\u0367"+ - "\u0007\u0012\u0000\u0000\u0367\u0368\u0007\u0003\u0000\u0000\u0368\u0081"+ - "\u0001\u0000\u0000\u0000\u0369\u036a\u0005(\u0000\u0000\u036a\u0083\u0001"+ - "\u0000\u0000\u0000\u036b\u036c\u0007\t\u0000\u0000\u036c\u036d\u0007\u0007"+ - "\u0000\u0000\u036d\u036e\u0007\u0005\u0000\u0000\u036e\u0085\u0001\u0000"+ - "\u0000\u0000\u036f\u0370\u0007\t\u0000\u0000\u0370\u0371\u0007\u0014\u0000"+ - "\u0000\u0371\u0372\u0007\r\u0000\u0000\u0372\u0373\u0007\r\u0000\u0000"+ - "\u0373\u0087\u0001\u0000\u0000\u0000\u0374\u0375\u0007\t\u0000\u0000\u0375"+ - "\u0376\u0007\u0014\u0000\u0000\u0376\u0377\u0007\r\u0000\u0000\u0377\u0378"+ - "\u0007\r\u0000\u0000\u0378\u0379\u0007\u0002\u0000\u0000\u0379\u0089\u0001"+ - "\u0000\u0000\u0000\u037a\u037b\u0007\u0007\u0000\u0000\u037b\u037c\u0007"+ - "\u0006\u0000\u0000\u037c\u008b\u0001\u0000\u0000\u0000\u037d\u037e\u0005"+ - "?\u0000\u0000\u037e\u008d\u0001\u0000\u0000\u0000\u037f\u0380\u0007\u0006"+ - "\u0000\u0000\u0380\u0381\u0007\r\u0000\u0000\u0381\u0382\u0007\u0001\u0000"+ - "\u0000\u0382\u0383\u0007\u0012\u0000\u0000\u0383\u0384\u0007\u0003\u0000"+ - "\u0000\u0384\u008f\u0001\u0000\u0000\u0000\u0385\u0386\u0005)\u0000\u0000"+ - "\u0386\u0091\u0001\u0000\u0000\u0000\u0387\u0388\u0007\u0005\u0000\u0000"+ - "\u0388\u0389\u0007\u0006\u0000\u0000\u0389\u038a\u0007\u0014\u0000\u0000"+ - "\u038a\u038b\u0007\u0003\u0000\u0000\u038b\u0093\u0001\u0000\u0000\u0000"+ - "\u038c\u038d\u0005=\u0000\u0000\u038d\u038e\u0005=\u0000\u0000\u038e\u0095"+ - 
"\u0001\u0000\u0000\u0000\u038f\u0390\u0005=\u0000\u0000\u0390\u0391\u0005"+ - "~\u0000\u0000\u0391\u0097\u0001\u0000\u0000\u0000\u0392\u0393\u0005!\u0000"+ - "\u0000\u0393\u0394\u0005=\u0000\u0000\u0394\u0099\u0001\u0000\u0000\u0000"+ - "\u0395\u0396\u0005<\u0000\u0000\u0396\u009b\u0001\u0000\u0000\u0000\u0397"+ - "\u0398\u0005<\u0000\u0000\u0398\u0399\u0005=\u0000\u0000\u0399\u009d\u0001"+ - "\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u009f\u0001\u0000"+ - "\u0000\u0000\u039c\u039d\u0005>\u0000\u0000\u039d\u039e\u0005=\u0000\u0000"+ - "\u039e\u00a1\u0001\u0000\u0000\u0000\u039f\u03a0\u0005+\u0000\u0000\u03a0"+ - "\u00a3\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005-\u0000\u0000\u03a2\u00a5"+ - "\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005*\u0000\u0000\u03a4\u00a7\u0001"+ - "\u0000\u0000\u0000\u03a5\u03a6\u0005/\u0000\u0000\u03a6\u00a9\u0001\u0000"+ - "\u0000\u0000\u03a7\u03a8\u0005%\u0000\u0000\u03a8\u00ab\u0001\u0000\u0000"+ - "\u0000\u03a9\u03aa\u0004N\b\u0000\u03aa\u03ab\u0003H\u001c\u0000\u03ab"+ - "\u03ac\u0001\u0000\u0000\u0000\u03ac\u03ad\u0006N\r\u0000\u03ad\u00ad"+ - "\u0001\u0000\u0000\u0000\u03ae\u03af\u0003.\u000f\u0000\u03af\u03b0\u0001"+ - "\u0000\u0000\u0000\u03b0\u03b1\u0006O\u000e\u0000\u03b1\u00af\u0001\u0000"+ - "\u0000\u0000\u03b2\u03b5\u0003\u008c>\u0000\u03b3\u03b6\u0003N\u001f\u0000"+ - "\u03b4\u03b6\u0003\\&\u0000\u03b5\u03b3\u0001\u0000\u0000\u0000\u03b5"+ - "\u03b4\u0001\u0000\u0000\u0000\u03b6\u03ba\u0001\u0000\u0000\u0000\u03b7"+ - "\u03b9\u0003^\'\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03bc"+ - "\u0001\u0000\u0000\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb"+ - "\u0001\u0000\u0000\u0000\u03bb\u03c4\u0001\u0000\u0000\u0000\u03bc\u03ba"+ - "\u0001\u0000\u0000\u0000\u03bd\u03bf\u0003\u008c>\u0000\u03be\u03c0\u0003"+ - "L\u001e\u0000\u03bf\u03be\u0001\u0000\u0000\u0000\u03c0\u03c1\u0001\u0000"+ - "\u0000\u0000\u03c1\u03bf\u0001\u0000\u0000\u0000\u03c1\u03c2\u0001\u0000"+ - "\u0000\u0000\u03c2\u03c4\u0001\u0000\u0000\u0000\u03c3\u03b2\u0001\u0000"+ - "\u0000\u0000\u03c3\u03bd\u0001\u0000\u0000\u0000\u03c4\u00b1\u0001\u0000"+ - "\u0000\u0000\u03c5\u03c6\u0005[\u0000\u0000\u03c6\u03c7\u0001\u0000\u0000"+ - "\u0000\u03c7\u03c8\u0006Q\u0000\u0000\u03c8\u03c9\u0006Q\u0000\u0000\u03c9"+ - "\u00b3\u0001\u0000\u0000\u0000\u03ca\u03cb\u0005]\u0000\u0000\u03cb\u03cc"+ - "\u0001\u0000\u0000\u0000\u03cc\u03cd\u0006R\f\u0000\u03cd\u03ce\u0006"+ - "R\f\u0000\u03ce\u00b5\u0001\u0000\u0000\u0000\u03cf\u03d3\u0003N\u001f"+ - "\u0000\u03d0\u03d2\u0003^\'\u0000\u03d1\u03d0\u0001\u0000\u0000\u0000"+ - "\u03d2\u03d5\u0001\u0000\u0000\u0000\u03d3\u03d1\u0001\u0000\u0000\u0000"+ - "\u03d3\u03d4\u0001\u0000\u0000\u0000\u03d4\u03e0\u0001\u0000\u0000\u0000"+ - "\u03d5\u03d3\u0001\u0000\u0000\u0000\u03d6\u03d9\u0003\\&\u0000\u03d7"+ - "\u03d9\u0003V#\u0000\u03d8\u03d6\u0001\u0000\u0000\u0000\u03d8\u03d7\u0001"+ - "\u0000\u0000\u0000\u03d9\u03db\u0001\u0000\u0000\u0000\u03da\u03dc\u0003"+ - "^\'\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03dd\u0001\u0000"+ - "\u0000\u0000\u03dd\u03db\u0001\u0000\u0000\u0000\u03dd\u03de\u0001\u0000"+ - "\u0000\u0000\u03de\u03e0\u0001\u0000\u0000\u0000\u03df\u03cf\u0001\u0000"+ - "\u0000\u0000\u03df\u03d8\u0001\u0000\u0000\u0000\u03e0\u00b7\u0001\u0000"+ - "\u0000\u0000\u03e1\u03e3\u0003X$\u0000\u03e2\u03e4\u0003Z%\u0000\u03e3"+ - "\u03e2\u0001\u0000\u0000\u0000\u03e4\u03e5\u0001\u0000\u0000\u0000\u03e5"+ - "\u03e3\u0001\u0000\u0000\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ - 
"\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0003X$\u0000\u03e8\u00b9\u0001"+ - "\u0000\u0000\u0000\u03e9\u03ea\u0003\u00b8T\u0000\u03ea\u00bb\u0001\u0000"+ - "\u0000\u0000\u03eb\u03ec\u0003B\u0019\u0000\u03ec\u03ed\u0001\u0000\u0000"+ - "\u0000\u03ed\u03ee\u0006V\u000b\u0000\u03ee\u00bd\u0001\u0000\u0000\u0000"+ - "\u03ef\u03f0\u0003D\u001a\u0000\u03f0\u03f1\u0001\u0000\u0000\u0000\u03f1"+ - "\u03f2\u0006W\u000b\u0000\u03f2\u00bf\u0001\u0000\u0000\u0000\u03f3\u03f4"+ - "\u0003F\u001b\u0000\u03f4\u03f5\u0001\u0000\u0000\u0000\u03f5\u03f6\u0006"+ - "X\u000b\u0000\u03f6\u00c1\u0001\u0000\u0000\u0000\u03f7\u03f8\u0003\u00b2"+ - "Q\u0000\u03f8\u03f9\u0001\u0000\u0000\u0000\u03f9\u03fa\u0006Y\u000f\u0000"+ - "\u03fa\u03fb\u0006Y\u0010\u0000\u03fb\u00c3\u0001\u0000\u0000\u0000\u03fc"+ - "\u03fd\u0003J\u001d\u0000\u03fd\u03fe\u0001\u0000\u0000\u0000\u03fe\u03ff"+ - "\u0006Z\u0011\u0000\u03ff\u0400\u0006Z\f\u0000\u0400\u00c5\u0001\u0000"+ - "\u0000\u0000\u0401\u0402\u0003F\u001b\u0000\u0402\u0403\u0001\u0000\u0000"+ - "\u0000\u0403\u0404\u0006[\u000b\u0000\u0404\u00c7\u0001\u0000\u0000\u0000"+ - "\u0405\u0406\u0003B\u0019\u0000\u0406\u0407\u0001\u0000\u0000\u0000\u0407"+ - "\u0408\u0006\\\u000b\u0000\u0408\u00c9\u0001\u0000\u0000\u0000\u0409\u040a"+ - "\u0003D\u001a\u0000\u040a\u040b\u0001\u0000\u0000\u0000\u040b\u040c\u0006"+ - "]\u000b\u0000\u040c\u00cb\u0001\u0000\u0000\u0000\u040d\u040e\u0003J\u001d"+ - "\u0000\u040e\u040f\u0001\u0000\u0000\u0000\u040f\u0410\u0006^\u0011\u0000"+ - "\u0410\u0411\u0006^\f\u0000\u0411\u00cd\u0001\u0000\u0000\u0000\u0412"+ - "\u0413\u0003\u00b2Q\u0000\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415"+ - "\u0006_\u000f\u0000\u0415\u00cf\u0001\u0000\u0000\u0000\u0416\u0417\u0003"+ - "\u00b4R\u0000\u0417\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006`\u0012"+ - "\u0000\u0419\u00d1\u0001\u0000\u0000\u0000\u041a\u041b\u0003H\u001c\u0000"+ - "\u041b\u041c\u0001\u0000\u0000\u0000\u041c\u041d\u0006a\r\u0000\u041d"+ - "\u00d3\u0001\u0000\u0000\u0000\u041e\u041f\u0003p0\u0000\u041f\u0420\u0001"+ - "\u0000\u0000\u0000\u0420\u0421\u0006b\u0013\u0000\u0421\u00d5\u0001\u0000"+ - "\u0000\u0000\u0422\u0423\u0003l.\u0000\u0423\u0424\u0001\u0000\u0000\u0000"+ - "\u0424\u0425\u0006c\u0014\u0000\u0425\u00d7\u0001\u0000\u0000\u0000\u0426"+ - "\u0427\u0007\u0010\u0000\u0000\u0427\u0428\u0007\u0003\u0000\u0000\u0428"+ - "\u0429\u0007\u0005\u0000\u0000\u0429\u042a\u0007\f\u0000\u0000\u042a\u042b"+ - "\u0007\u0000\u0000\u0000\u042b\u042c\u0007\f\u0000\u0000\u042c\u042d\u0007"+ - "\u0005\u0000\u0000\u042d\u042e\u0007\f\u0000\u0000\u042e\u00d9\u0001\u0000"+ - "\u0000\u0000\u042f\u0433\b!\u0000\u0000\u0430\u0431\u0005/\u0000\u0000"+ - "\u0431\u0433\b\"\u0000\u0000\u0432\u042f\u0001\u0000\u0000\u0000\u0432"+ - "\u0430\u0001\u0000\u0000\u0000\u0433\u00db\u0001\u0000\u0000\u0000\u0434"+ - "\u0436\u0003\u00dae\u0000\u0435\u0434\u0001\u0000\u0000\u0000\u0436\u0437"+ - "\u0001\u0000\u0000\u0000\u0437\u0435\u0001\u0000\u0000\u0000\u0437\u0438"+ - "\u0001\u0000\u0000\u0000\u0438\u00dd\u0001\u0000\u0000\u0000\u0439\u043a"+ - "\u0003\u00dcf\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b\u043c\u0006"+ - "g\u0015\u0000\u043c\u00df\u0001\u0000\u0000\u0000\u043d\u043e\u0003`("+ - "\u0000\u043e\u043f\u0001\u0000\u0000\u0000\u043f\u0440\u0006h\u0016\u0000"+ - "\u0440\u00e1\u0001\u0000\u0000\u0000\u0441\u0442\u0003B\u0019\u0000\u0442"+ - "\u0443\u0001\u0000\u0000\u0000\u0443\u0444\u0006i\u000b\u0000\u0444\u00e3"+ - "\u0001\u0000\u0000\u0000\u0445\u0446\u0003D\u001a\u0000\u0446\u0447\u0001"+ - 
"\u0000\u0000\u0000\u0447\u0448\u0006j\u000b\u0000\u0448\u00e5\u0001\u0000"+ - "\u0000\u0000\u0449\u044a\u0003F\u001b\u0000\u044a\u044b\u0001\u0000\u0000"+ - "\u0000\u044b\u044c\u0006k\u000b\u0000\u044c\u00e7\u0001\u0000\u0000\u0000"+ - "\u044d\u044e\u0003J\u001d\u0000\u044e\u044f\u0001\u0000\u0000\u0000\u044f"+ - "\u0450\u0006l\u0011\u0000\u0450\u0451\u0006l\f\u0000\u0451\u00e9\u0001"+ - "\u0000\u0000\u0000\u0452\u0453\u0003t2\u0000\u0453\u0454\u0001\u0000\u0000"+ - "\u0000\u0454\u0455\u0006m\u0017\u0000\u0455\u00eb\u0001\u0000\u0000\u0000"+ - "\u0456\u0457\u0003p0\u0000\u0457\u0458\u0001\u0000\u0000\u0000\u0458\u0459"+ - "\u0006n\u0013\u0000\u0459\u00ed\u0001\u0000\u0000\u0000\u045a\u045b\u0004"+ - "o\t\u0000\u045b\u045c\u0003\u008c>\u0000\u045c\u045d\u0001\u0000\u0000"+ - "\u0000\u045d\u045e\u0006o\u0018\u0000\u045e\u00ef\u0001\u0000\u0000\u0000"+ - "\u045f\u0460\u0004p\n\u0000\u0460\u0461\u0003\u00b0P\u0000\u0461\u0462"+ - "\u0001\u0000\u0000\u0000\u0462\u0463\u0006p\u0019\u0000\u0463\u00f1\u0001"+ - "\u0000\u0000\u0000\u0464\u0469\u0003N\u001f\u0000\u0465\u0469\u0003L\u001e"+ - "\u0000\u0466\u0469\u0003\\&\u0000\u0467\u0469\u0003\u00a6K\u0000\u0468"+ - "\u0464\u0001\u0000\u0000\u0000\u0468\u0465\u0001\u0000\u0000\u0000\u0468"+ - "\u0466\u0001\u0000\u0000\u0000\u0468\u0467\u0001\u0000\u0000\u0000\u0469"+ - "\u00f3\u0001\u0000\u0000\u0000\u046a\u046d\u0003N\u001f\u0000\u046b\u046d"+ - "\u0003\u00a6K\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046c\u046b\u0001"+ - "\u0000\u0000\u0000\u046d\u0471\u0001\u0000\u0000\u0000\u046e\u0470\u0003"+ - "\u00f2q\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0473\u0001\u0000"+ - "\u0000\u0000\u0471\u046f\u0001\u0000\u0000\u0000\u0471\u0472\u0001\u0000"+ - "\u0000\u0000\u0472\u047e\u0001\u0000\u0000\u0000\u0473\u0471\u0001\u0000"+ - "\u0000\u0000\u0474\u0477\u0003\\&\u0000\u0475\u0477\u0003V#\u0000\u0476"+ - "\u0474\u0001\u0000\u0000\u0000\u0476\u0475\u0001\u0000\u0000\u0000\u0477"+ - "\u0479\u0001\u0000\u0000\u0000\u0478\u047a\u0003\u00f2q\u0000\u0479\u0478"+ - "\u0001\u0000\u0000\u0000\u047a\u047b\u0001\u0000\u0000\u0000\u047b\u0479"+ - "\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c\u047e"+ - "\u0001\u0000\u0000\u0000\u047d\u046c\u0001\u0000\u0000\u0000\u047d\u0476"+ - "\u0001\u0000\u0000\u0000\u047e\u00f5\u0001\u0000\u0000\u0000\u047f\u0482"+ - "\u0003\u00f4r\u0000\u0480\u0482\u0003\u00b8T\u0000\u0481\u047f\u0001\u0000"+ - "\u0000\u0000\u0481\u0480\u0001\u0000\u0000\u0000\u0482\u0483\u0001\u0000"+ - "\u0000\u0000\u0483\u0481\u0001\u0000\u0000\u0000\u0483\u0484\u0001\u0000"+ - "\u0000\u0000\u0484\u00f7\u0001\u0000\u0000\u0000\u0485\u0486\u0003B\u0019"+ - "\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u0488\u0006t\u000b\u0000"+ - "\u0488\u00f9\u0001\u0000\u0000\u0000\u0489\u048a\u0003D\u001a\u0000\u048a"+ - "\u048b\u0001\u0000\u0000\u0000\u048b\u048c\u0006u\u000b\u0000\u048c\u00fb"+ - "\u0001\u0000\u0000\u0000\u048d\u048e\u0003F\u001b\u0000\u048e\u048f\u0001"+ - "\u0000\u0000\u0000\u048f\u0490\u0006v\u000b\u0000\u0490\u00fd\u0001\u0000"+ - "\u0000\u0000\u0491\u0492\u0003J\u001d\u0000\u0492\u0493\u0001\u0000\u0000"+ - "\u0000\u0493\u0494\u0006w\u0011\u0000\u0494\u0495\u0006w\f\u0000\u0495"+ - "\u00ff\u0001\u0000\u0000\u0000\u0496\u0497\u0003l.\u0000\u0497\u0498\u0001"+ - "\u0000\u0000\u0000\u0498\u0499\u0006x\u0014\u0000\u0499\u0101\u0001\u0000"+ - "\u0000\u0000\u049a\u049b\u0003p0\u0000\u049b\u049c\u0001\u0000\u0000\u0000"+ - "\u049c\u049d\u0006y\u0013\u0000\u049d\u0103\u0001\u0000\u0000\u0000\u049e"+ - 
"\u049f\u0003t2\u0000\u049f\u04a0\u0001\u0000\u0000\u0000\u04a0\u04a1\u0006"+ - "z\u0017\u0000\u04a1\u0105\u0001\u0000\u0000\u0000\u04a2\u04a3\u0004{\u000b"+ - "\u0000\u04a3\u04a4\u0003\u008c>\u0000\u04a4\u04a5\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0006{\u0018\u0000\u04a6\u0107\u0001\u0000\u0000\u0000\u04a7"+ - "\u04a8\u0004|\f\u0000\u04a8\u04a9\u0003\u00b0P\u0000\u04a9\u04aa\u0001"+ - "\u0000\u0000\u0000\u04aa\u04ab\u0006|\u0019\u0000\u04ab\u0109\u0001\u0000"+ - "\u0000\u0000\u04ac\u04ad\u0007\f\u0000\u0000\u04ad\u04ae\u0007\u0002\u0000"+ - "\u0000\u04ae\u010b\u0001\u0000\u0000\u0000\u04af\u04b0\u0003\u00f6s\u0000"+ - "\u04b0\u04b1\u0001\u0000\u0000\u0000\u04b1\u04b2\u0006~\u001a\u0000\u04b2"+ - "\u010d\u0001\u0000\u0000\u0000\u04b3\u04b4\u0003B\u0019\u0000\u04b4\u04b5"+ - "\u0001\u0000\u0000\u0000\u04b5\u04b6\u0006\u007f\u000b\u0000\u04b6\u010f"+ - "\u0001\u0000\u0000\u0000\u04b7\u04b8\u0003D\u001a\u0000\u04b8\u04b9\u0001"+ - "\u0000\u0000\u0000\u04b9\u04ba\u0006\u0080\u000b\u0000\u04ba\u0111\u0001"+ - "\u0000\u0000\u0000\u04bb\u04bc\u0003F\u001b\u0000\u04bc\u04bd\u0001\u0000"+ - "\u0000\u0000\u04bd\u04be\u0006\u0081\u000b\u0000\u04be\u0113\u0001\u0000"+ - "\u0000\u0000\u04bf\u04c0\u0003J\u001d\u0000\u04c0\u04c1\u0001\u0000\u0000"+ - "\u0000\u04c1\u04c2\u0006\u0082\u0011\u0000\u04c2\u04c3\u0006\u0082\f\u0000"+ - "\u04c3\u0115\u0001\u0000\u0000\u0000\u04c4\u04c5\u0003\u00b2Q\u0000\u04c5"+ - "\u04c6\u0001\u0000\u0000\u0000\u04c6\u04c7\u0006\u0083\u000f\u0000\u04c7"+ - "\u04c8\u0006\u0083\u001b\u0000\u04c8\u0117\u0001\u0000\u0000\u0000\u04c9"+ - "\u04ca\u0007\u0007\u0000\u0000\u04ca\u04cb\u0007\t\u0000\u0000\u04cb\u04cc"+ - "\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000\u04cd\u0119"+ - "\u0001\u0000\u0000\u0000\u04ce\u04cf\u0007\u0013\u0000\u0000\u04cf\u04d0"+ - "\u0007\u0001\u0000\u0000\u04d0\u04d1\u0007\u0005\u0000\u0000\u04d1\u04d2"+ - "\u0007\n\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3\u04d4\u0006"+ - "\u0085\u001c\u0000\u04d4\u011b\u0001\u0000\u0000\u0000\u04d5\u04d6\b#"+ - "\u0000\u0000\u04d6\u011d\u0001\u0000\u0000\u0000\u04d7\u04d9\u0003\u011c"+ - "\u0086\u0000\u04d8\u04d7\u0001\u0000\u0000\u0000\u04d9\u04da\u0001\u0000"+ - "\u0000\u0000\u04da\u04d8\u0001\u0000\u0000\u0000\u04da\u04db\u0001\u0000"+ - "\u0000\u0000\u04db\u04dc\u0001\u0000\u0000\u0000\u04dc\u04dd\u0003H\u001c"+ - "\u0000\u04dd\u04df\u0001\u0000\u0000\u0000\u04de\u04d8\u0001\u0000\u0000"+ - "\u0000\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e0\u04e2\u0003\u011c\u0086\u0000\u04e1\u04e0\u0001\u0000\u0000"+ - "\u0000\u04e2\u04e3\u0001\u0000\u0000\u0000\u04e3\u04e1\u0001\u0000\u0000"+ - "\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u011f\u0001\u0000\u0000"+ - "\u0000\u04e5\u04e6\u0003\u011e\u0087\u0000\u04e6\u04e7\u0001\u0000\u0000"+ - "\u0000\u04e7\u04e8\u0006\u0088\u001d\u0000\u04e8\u0121\u0001\u0000\u0000"+ - "\u0000\u04e9\u04ea\u0003B\u0019\u0000\u04ea\u04eb\u0001\u0000\u0000\u0000"+ - "\u04eb\u04ec\u0006\u0089\u000b\u0000\u04ec\u0123\u0001\u0000\u0000\u0000"+ - "\u04ed\u04ee\u0003D\u001a\u0000\u04ee\u04ef\u0001\u0000\u0000\u0000\u04ef"+ - "\u04f0\u0006\u008a\u000b\u0000\u04f0\u0125\u0001\u0000\u0000\u0000\u04f1"+ - "\u04f2\u0003F\u001b\u0000\u04f2\u04f3\u0001\u0000\u0000\u0000\u04f3\u04f4"+ - "\u0006\u008b\u000b\u0000\u04f4\u0127\u0001\u0000\u0000\u0000\u04f5\u04f6"+ - "\u0003J\u001d\u0000\u04f6\u04f7\u0001\u0000\u0000\u0000\u04f7\u04f8\u0006"+ - "\u008c\u0011\u0000\u04f8\u04f9\u0006\u008c\f\u0000\u04f9\u04fa\u0006\u008c"+ - 
"\f\u0000\u04fa\u0129\u0001\u0000\u0000\u0000\u04fb\u04fc\u0003l.\u0000"+ - "\u04fc\u04fd\u0001\u0000\u0000\u0000\u04fd\u04fe\u0006\u008d\u0014\u0000"+ - "\u04fe\u012b\u0001\u0000\u0000\u0000\u04ff\u0500\u0003p0\u0000\u0500\u0501"+ - "\u0001\u0000\u0000\u0000\u0501\u0502\u0006\u008e\u0013\u0000\u0502\u012d"+ - "\u0001\u0000\u0000\u0000\u0503\u0504\u0003t2\u0000\u0504\u0505\u0001\u0000"+ - "\u0000\u0000\u0505\u0506\u0006\u008f\u0017\u0000\u0506\u012f\u0001\u0000"+ - "\u0000\u0000\u0507\u0508\u0003\u011a\u0085\u0000\u0508\u0509\u0001\u0000"+ - "\u0000\u0000\u0509\u050a\u0006\u0090\u001e\u0000\u050a\u0131\u0001\u0000"+ - "\u0000\u0000\u050b\u050c\u0003\u00f6s\u0000\u050c\u050d\u0001\u0000\u0000"+ - "\u0000\u050d\u050e\u0006\u0091\u001a\u0000\u050e\u0133\u0001\u0000\u0000"+ - "\u0000\u050f\u0510\u0003\u00baU\u0000\u0510\u0511\u0001\u0000\u0000\u0000"+ - "\u0511\u0512\u0006\u0092\u001f\u0000\u0512\u0135\u0001\u0000\u0000\u0000"+ - "\u0513\u0514\u0004\u0093\r\u0000\u0514\u0515\u0003\u008c>\u0000\u0515"+ - "\u0516\u0001\u0000\u0000\u0000\u0516\u0517\u0006\u0093\u0018\u0000\u0517"+ - "\u0137\u0001\u0000\u0000\u0000\u0518\u0519\u0004\u0094\u000e\u0000\u0519"+ - "\u051a\u0003\u00b0P\u0000\u051a\u051b\u0001\u0000\u0000\u0000\u051b\u051c"+ - "\u0006\u0094\u0019\u0000\u051c\u0139\u0001\u0000\u0000\u0000\u051d\u051e"+ - "\u0003B\u0019\u0000\u051e\u051f\u0001\u0000\u0000\u0000\u051f\u0520\u0006"+ - "\u0095\u000b\u0000\u0520\u013b\u0001\u0000\u0000\u0000\u0521\u0522\u0003"+ - "D\u001a\u0000\u0522\u0523\u0001\u0000\u0000\u0000\u0523\u0524\u0006\u0096"+ - "\u000b\u0000\u0524\u013d\u0001\u0000\u0000\u0000\u0525\u0526\u0003F\u001b"+ - "\u0000\u0526\u0527\u0001\u0000\u0000\u0000\u0527\u0528\u0006\u0097\u000b"+ - "\u0000\u0528\u013f\u0001\u0000\u0000\u0000\u0529\u052a\u0003J\u001d\u0000"+ - "\u052a\u052b\u0001\u0000\u0000\u0000\u052b\u052c\u0006\u0098\u0011\u0000"+ - "\u052c\u052d\u0006\u0098\f\u0000\u052d\u0141\u0001\u0000\u0000\u0000\u052e"+ - "\u052f\u0003t2\u0000\u052f\u0530\u0001\u0000\u0000\u0000\u0530\u0531\u0006"+ - "\u0099\u0017\u0000\u0531\u0143\u0001\u0000\u0000\u0000\u0532\u0533\u0004"+ - "\u009a\u000f\u0000\u0533\u0534\u0003\u008c>\u0000\u0534\u0535\u0001\u0000"+ - "\u0000\u0000\u0535\u0536\u0006\u009a\u0018\u0000\u0536\u0145\u0001\u0000"+ - "\u0000\u0000\u0537\u0538\u0004\u009b\u0010\u0000\u0538\u0539\u0003\u00b0"+ - "P\u0000\u0539\u053a\u0001\u0000\u0000\u0000\u053a\u053b\u0006\u009b\u0019"+ - "\u0000\u053b\u0147\u0001\u0000\u0000\u0000\u053c\u053d\u0003\u00baU\u0000"+ - "\u053d\u053e\u0001\u0000\u0000\u0000\u053e\u053f\u0006\u009c\u001f\u0000"+ - "\u053f\u0149\u0001\u0000\u0000\u0000\u0540\u0541\u0003\u00b6S\u0000\u0541"+ - "\u0542\u0001\u0000\u0000\u0000\u0542\u0543\u0006\u009d \u0000\u0543\u014b"+ - "\u0001\u0000\u0000\u0000\u0544\u0545\u0003B\u0019\u0000\u0545\u0546\u0001"+ - "\u0000\u0000\u0000\u0546\u0547\u0006\u009e\u000b\u0000\u0547\u014d\u0001"+ - "\u0000\u0000\u0000\u0548\u0549\u0003D\u001a\u0000\u0549\u054a\u0001\u0000"+ - "\u0000\u0000\u054a\u054b\u0006\u009f\u000b\u0000\u054b\u014f\u0001\u0000"+ - "\u0000\u0000\u054c\u054d\u0003F\u001b\u0000\u054d\u054e\u0001\u0000\u0000"+ - "\u0000\u054e\u054f\u0006\u00a0\u000b\u0000\u054f\u0151\u0001\u0000\u0000"+ - "\u0000\u0550\u0551\u0003J\u001d\u0000\u0551\u0552\u0001\u0000\u0000\u0000"+ - "\u0552\u0553\u0006\u00a1\u0011\u0000\u0553\u0554\u0006\u00a1\f\u0000\u0554"+ - "\u0153\u0001\u0000\u0000\u0000\u0555\u0556\u0007\u0001\u0000\u0000\u0556"+ - "\u0557\u0007\t\u0000\u0000\u0557\u0558\u0007\u000f\u0000\u0000\u0558\u0559"+ - 
"\u0007\u0007\u0000\u0000\u0559\u0155\u0001\u0000\u0000\u0000\u055a\u055b"+ - "\u0003B\u0019\u0000\u055b\u055c\u0001\u0000\u0000\u0000\u055c\u055d\u0006"+ - "\u00a3\u000b\u0000\u055d\u0157\u0001\u0000\u0000\u0000\u055e\u055f\u0003"+ - "D\u001a\u0000\u055f\u0560\u0001\u0000\u0000\u0000\u0560\u0561\u0006\u00a4"+ - "\u000b\u0000\u0561\u0159\u0001\u0000\u0000\u0000\u0562\u0563\u0003F\u001b"+ - "\u0000\u0563\u0564\u0001\u0000\u0000\u0000\u0564\u0565\u0006\u00a5\u000b"+ - "\u0000\u0565\u015b\u0001\u0000\u0000\u0000\u0566\u0567\u0003\u00b4R\u0000"+ - "\u0567\u0568\u0001\u0000\u0000\u0000\u0568\u0569\u0006\u00a6\u0012\u0000"+ - "\u0569\u056a\u0006\u00a6\f\u0000\u056a\u015d\u0001\u0000\u0000\u0000\u056b"+ - "\u056c\u0003H\u001c\u0000\u056c\u056d\u0001\u0000\u0000\u0000\u056d\u056e"+ - "\u0006\u00a7\r\u0000\u056e\u015f\u0001\u0000\u0000\u0000\u056f\u0575\u0003"+ - "V#\u0000\u0570\u0575\u0003L\u001e\u0000\u0571\u0575\u0003t2\u0000\u0572"+ - "\u0575\u0003N\u001f\u0000\u0573\u0575\u0003\\&\u0000\u0574\u056f\u0001"+ - "\u0000\u0000\u0000\u0574\u0570\u0001\u0000\u0000\u0000\u0574\u0571\u0001"+ - "\u0000\u0000\u0000\u0574\u0572\u0001\u0000\u0000\u0000\u0574\u0573\u0001"+ - "\u0000\u0000\u0000\u0575\u0576\u0001\u0000\u0000\u0000\u0576\u0574\u0001"+ - "\u0000\u0000\u0000\u0576\u0577\u0001\u0000\u0000\u0000\u0577\u0161\u0001"+ - "\u0000\u0000\u0000\u0578\u0579\u0003B\u0019\u0000\u0579\u057a\u0001\u0000"+ - "\u0000\u0000\u057a\u057b\u0006\u00a9\u000b\u0000\u057b\u0163\u0001\u0000"+ - "\u0000\u0000\u057c\u057d\u0003D\u001a\u0000\u057d\u057e\u0001\u0000\u0000"+ - "\u0000\u057e\u057f\u0006\u00aa\u000b\u0000\u057f\u0165\u0001\u0000\u0000"+ - "\u0000\u0580\u0581\u0003F\u001b\u0000\u0581\u0582\u0001\u0000\u0000\u0000"+ - "\u0582\u0583\u0006\u00ab\u000b\u0000\u0583\u0167\u0001\u0000\u0000\u0000"+ - "\u0584\u0585\u0003J\u001d\u0000\u0585\u0586\u0001\u0000\u0000\u0000\u0586"+ - "\u0587\u0006\u00ac\u0011\u0000\u0587\u0588\u0006\u00ac\f\u0000\u0588\u0169"+ - "\u0001\u0000\u0000\u0000\u0589\u058a\u0003H\u001c\u0000\u058a\u058b\u0001"+ - "\u0000\u0000\u0000\u058b\u058c\u0006\u00ad\r\u0000\u058c\u016b\u0001\u0000"+ - "\u0000\u0000\u058d\u058e\u0003p0\u0000\u058e\u058f\u0001\u0000\u0000\u0000"+ - "\u058f\u0590\u0006\u00ae\u0013\u0000\u0590\u016d\u0001\u0000\u0000\u0000"+ - "\u0591\u0592\u0003t2\u0000\u0592\u0593\u0001\u0000\u0000\u0000\u0593\u0594"+ - "\u0006\u00af\u0017\u0000\u0594\u016f\u0001\u0000\u0000\u0000\u0595\u0596"+ - "\u0003\u0118\u0084\u0000\u0596\u0597\u0001\u0000\u0000\u0000\u0597\u0598"+ - "\u0006\u00b0!\u0000\u0598\u0599\u0006\u00b0\"\u0000\u0599\u0171\u0001"+ - "\u0000\u0000\u0000\u059a\u059b\u0003\u00dcf\u0000\u059b\u059c\u0001\u0000"+ - "\u0000\u0000\u059c\u059d\u0006\u00b1\u0015\u0000\u059d\u0173\u0001\u0000"+ - "\u0000\u0000\u059e\u059f\u0003`(\u0000\u059f\u05a0\u0001\u0000\u0000\u0000"+ - "\u05a0\u05a1\u0006\u00b2\u0016\u0000\u05a1\u0175\u0001\u0000\u0000\u0000"+ - "\u05a2\u05a3\u0003B\u0019\u0000\u05a3\u05a4\u0001\u0000\u0000\u0000\u05a4"+ - "\u05a5\u0006\u00b3\u000b\u0000\u05a5\u0177\u0001\u0000\u0000\u0000\u05a6"+ - "\u05a7\u0003D\u001a\u0000\u05a7\u05a8\u0001\u0000\u0000\u0000\u05a8\u05a9"+ - "\u0006\u00b4\u000b\u0000\u05a9\u0179\u0001\u0000\u0000\u0000\u05aa\u05ab"+ - "\u0003F\u001b\u0000\u05ab\u05ac\u0001\u0000\u0000\u0000\u05ac\u05ad\u0006"+ - "\u00b5\u000b\u0000\u05ad\u017b\u0001\u0000\u0000\u0000\u05ae\u05af\u0003"+ - "J\u001d\u0000\u05af\u05b0\u0001\u0000\u0000\u0000\u05b0\u05b1\u0006\u00b6"+ - "\u0011\u0000\u05b1\u05b2\u0006\u00b6\f\u0000\u05b2\u05b3\u0006\u00b6\f"+ - 
"\u0000\u05b3\u017d\u0001\u0000\u0000\u0000\u05b4\u05b5\u0003p0\u0000\u05b5"+ - "\u05b6\u0001\u0000\u0000\u0000\u05b6\u05b7\u0006\u00b7\u0013\u0000\u05b7"+ - "\u017f\u0001\u0000\u0000\u0000\u05b8\u05b9\u0003t2\u0000\u05b9\u05ba\u0001"+ - "\u0000\u0000\u0000\u05ba\u05bb\u0006\u00b8\u0017\u0000\u05bb\u0181\u0001"+ - "\u0000\u0000\u0000\u05bc\u05bd\u0003\u00f6s\u0000\u05bd\u05be\u0001\u0000"+ - "\u0000\u0000\u05be\u05bf\u0006\u00b9\u001a\u0000\u05bf\u0183\u0001\u0000"+ - "\u0000\u0000\u05c0\u05c1\u0003B\u0019\u0000\u05c1\u05c2\u0001\u0000\u0000"+ - "\u0000\u05c2\u05c3\u0006\u00ba\u000b\u0000\u05c3\u0185\u0001\u0000\u0000"+ - "\u0000\u05c4\u05c5\u0003D\u001a\u0000\u05c5\u05c6\u0001\u0000\u0000\u0000"+ - "\u05c6\u05c7\u0006\u00bb\u000b\u0000\u05c7\u0187\u0001\u0000\u0000\u0000"+ - "\u05c8\u05c9\u0003F\u001b\u0000\u05c9\u05ca\u0001\u0000\u0000\u0000\u05ca"+ - "\u05cb\u0006\u00bc\u000b\u0000\u05cb\u0189\u0001\u0000\u0000\u0000\u05cc"+ - "\u05cd\u0003J\u001d\u0000\u05cd\u05ce\u0001\u0000\u0000\u0000\u05ce\u05cf"+ - "\u0006\u00bd\u0011\u0000\u05cf\u05d0\u0006\u00bd\f\u0000\u05d0\u018b\u0001"+ - "\u0000\u0000\u0000\u05d1\u05d2\u00036\u0013\u0000\u05d2\u05d3\u0001\u0000"+ - "\u0000\u0000\u05d3\u05d4\u0006\u00be#\u0000\u05d4\u018d\u0001\u0000\u0000"+ - "\u0000\u05d5\u05d6\u0003\u010a}\u0000\u05d6\u05d7\u0001\u0000\u0000\u0000"+ - "\u05d7\u05d8\u0006\u00bf$\u0000\u05d8\u018f\u0001\u0000\u0000\u0000\u05d9"+ - "\u05da\u0003\u0118\u0084\u0000\u05da\u05db\u0001\u0000\u0000\u0000\u05db"+ - "\u05dc\u0006\u00c0!\u0000\u05dc\u05dd\u0006\u00c0\f\u0000\u05dd\u05de"+ - "\u0006\u00c0\u0000\u0000\u05de\u0191\u0001\u0000\u0000\u0000\u05df\u05e0"+ - "\u0007\u0014\u0000\u0000\u05e0\u05e1\u0007\u0002\u0000\u0000\u05e1\u05e2"+ - "\u0007\u0001\u0000\u0000\u05e2\u05e3\u0007\t\u0000\u0000\u05e3\u05e4\u0007"+ - "\u0011\u0000\u0000\u05e4\u05e5\u0001\u0000\u0000\u0000\u05e5\u05e6\u0006"+ - "\u00c1\f\u0000\u05e6\u05e7\u0006\u00c1\u0000\u0000\u05e7\u0193\u0001\u0000"+ - "\u0000\u0000\u05e8\u05e9\u0003\u00b6S\u0000\u05e9\u05ea\u0001\u0000\u0000"+ - "\u0000\u05ea\u05eb\u0006\u00c2 \u0000\u05eb\u0195\u0001\u0000\u0000\u0000"+ - "\u05ec\u05ed\u0003\u00baU\u0000\u05ed\u05ee\u0001\u0000\u0000\u0000\u05ee"+ - "\u05ef\u0006\u00c3\u001f\u0000\u05ef\u0197\u0001\u0000\u0000\u0000\u05f0"+ - "\u05f1\u0003B\u0019\u0000\u05f1\u05f2\u0001\u0000\u0000\u0000\u05f2\u05f3"+ - "\u0006\u00c4\u000b\u0000\u05f3\u0199\u0001\u0000\u0000\u0000\u05f4\u05f5"+ - "\u0003D\u001a\u0000\u05f5\u05f6\u0001\u0000\u0000\u0000\u05f6\u05f7\u0006"+ - "\u00c5\u000b\u0000\u05f7\u019b\u0001\u0000\u0000\u0000\u05f8\u05f9\u0003"+ - "F\u001b\u0000\u05f9\u05fa\u0001\u0000\u0000\u0000\u05fa\u05fb\u0006\u00c6"+ - "\u000b\u0000\u05fb\u019d\u0001\u0000\u0000\u0000\u05fc\u05fd\u0003J\u001d"+ - "\u0000\u05fd\u05fe\u0001\u0000\u0000\u0000\u05fe\u05ff\u0006\u00c7\u0011"+ - "\u0000\u05ff\u0600\u0006\u00c7\f\u0000\u0600\u019f\u0001\u0000\u0000\u0000"+ - "\u0601\u0602\u0003\u00dcf\u0000\u0602\u0603\u0001\u0000\u0000\u0000\u0603"+ - "\u0604\u0006\u00c8\u0015\u0000\u0604\u0605\u0006\u00c8\f\u0000\u0605\u0606"+ - "\u0006\u00c8%\u0000\u0606\u01a1\u0001\u0000\u0000\u0000\u0607\u0608\u0003"+ - "`(\u0000\u0608\u0609\u0001\u0000\u0000\u0000\u0609\u060a\u0006\u00c9\u0016"+ - "\u0000\u060a\u060b\u0006\u00c9\f\u0000\u060b\u060c\u0006\u00c9%\u0000"+ - "\u060c\u01a3\u0001\u0000\u0000\u0000\u060d\u060e\u0003B\u0019\u0000\u060e"+ - "\u060f\u0001\u0000\u0000\u0000\u060f\u0610\u0006\u00ca\u000b\u0000\u0610"+ - "\u01a5\u0001\u0000\u0000\u0000\u0611\u0612\u0003D\u001a\u0000\u0612\u0613"+ - 
"\u0001\u0000\u0000\u0000\u0613\u0614\u0006\u00cb\u000b\u0000\u0614\u01a7"+ - "\u0001\u0000\u0000\u0000\u0615\u0616\u0003F\u001b\u0000\u0616\u0617\u0001"+ - "\u0000\u0000\u0000\u0617\u0618\u0006\u00cc\u000b\u0000\u0618\u01a9\u0001"+ - "\u0000\u0000\u0000\u0619\u061a\u0003H\u001c\u0000\u061a\u061b\u0001\u0000"+ - "\u0000\u0000\u061b\u061c\u0006\u00cd\r\u0000\u061c\u061d\u0006\u00cd\f"+ - "\u0000\u061d\u061e\u0006\u00cd\t\u0000\u061e\u01ab\u0001\u0000\u0000\u0000"+ - "\u061f\u0620\u0003p0\u0000\u0620\u0621\u0001\u0000\u0000\u0000\u0621\u0622"+ - "\u0006\u00ce\u0013\u0000\u0622\u0623\u0006\u00ce\f\u0000\u0623\u0624\u0006"+ - "\u00ce\t\u0000\u0624\u01ad\u0001\u0000\u0000\u0000\u0625\u0626\u0003B"+ - "\u0019\u0000\u0626\u0627\u0001\u0000\u0000\u0000\u0627\u0628\u0006\u00cf"+ - "\u000b\u0000\u0628\u01af\u0001\u0000\u0000\u0000\u0629\u062a\u0003D\u001a"+ - "\u0000\u062a\u062b\u0001\u0000\u0000\u0000\u062b\u062c\u0006\u00d0\u000b"+ - "\u0000\u062c\u01b1\u0001\u0000\u0000\u0000\u062d\u062e\u0003F\u001b\u0000"+ - "\u062e\u062f\u0001\u0000\u0000\u0000\u062f\u0630\u0006\u00d1\u000b\u0000"+ - "\u0630\u01b3\u0001\u0000\u0000\u0000\u0631\u0632\u0003\u00baU\u0000\u0632"+ - "\u0633\u0001\u0000\u0000\u0000\u0633\u0634\u0006\u00d2\f\u0000\u0634\u0635"+ - "\u0006\u00d2\u0000\u0000\u0635\u0636\u0006\u00d2\u001f\u0000\u0636\u01b5"+ - "\u0001\u0000\u0000\u0000\u0637\u0638\u0003\u00b6S\u0000\u0638\u0639\u0001"+ - "\u0000\u0000\u0000\u0639\u063a\u0006\u00d3\f\u0000\u063a\u063b\u0006\u00d3"+ - "\u0000\u0000\u063b\u063c\u0006\u00d3 \u0000\u063c\u01b7\u0001\u0000\u0000"+ - "\u0000\u063d\u063e\u0003f+\u0000\u063e\u063f\u0001\u0000\u0000\u0000\u063f"+ - "\u0640\u0006\u00d4\f\u0000\u0640\u0641\u0006\u00d4\u0000\u0000\u0641\u0642"+ - "\u0006\u00d4&\u0000\u0642\u01b9\u0001\u0000\u0000\u0000\u0643\u0644\u0003"+ - "J\u001d\u0000\u0644\u0645\u0001\u0000\u0000\u0000\u0645\u0646\u0006\u00d5"+ - "\u0011\u0000\u0646\u0647\u0006\u00d5\f\u0000\u0647\u01bb\u0001\u0000\u0000"+ + "k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ + "m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0003p\u0462\bp\u0001q\u0001q\u0003q\u0466"+ + "\bq\u0001q\u0005q\u0469\bq\nq\fq\u046c\tq\u0001q\u0001q\u0003q\u0470\b"+ + "q\u0001q\u0004q\u0473\bq\u000bq\fq\u0474\u0003q\u0477\bq\u0001r\u0001"+ + "r\u0004r\u047b\br\u000br\fr\u047c\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ + "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ + "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ + "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ + "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ + "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ + "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0004"+ + "\u0086\u04d2\b\u0086\u000b\u0086\f\u0086\u04d3\u0001\u0086\u0001\u0086"+ + "\u0003\u0086\u04d8\b\u0086\u0001\u0086\u0004\u0086\u04db\b\u0086\u000b"+ + "\u0086\f\u0086\u04dc\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ + "\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001"+ + 
"\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ + "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ + "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ + "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ + "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001"+ + "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ + "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ + "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ + "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ + "\u00a7\u0004\u00a7\u056e\b\u00a7\u000b\u00a7\f\u00a7\u056f\u0001\u00a8"+ + "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ + "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ + "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ + "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ + "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af"+ + "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ + "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ + "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ + "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ + "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ + "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ + "\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ + "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ + "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ + "\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ + "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ + "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ + "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ + "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ + "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ + "\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6"+ + 
"\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7"+ + "\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8"+ + "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ + "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ + "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ + "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ + "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ + "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ + "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ + "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ + "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ + "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4"+ + "\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0002\u02ab\u02f0\u0000\u00d5\u0010"+ + "\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c"+ + "\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u0012"+ + "4\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001c"+ + "H\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000"+ + "\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ + "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ + ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac"+ + "\u0000\u00aeE\u00b0F\u00b2G\u00b4H\u00b6\u0000\u00b8I\u00baJ\u00bcK\u00be"+ + "L\u00c0\u0000\u00c2\u0000\u00c4M\u00c6N\u00c8O\u00ca\u0000\u00cc\u0000"+ + "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6P\u00d8\u0000\u00da"+ + "Q\u00dc\u0000\u00de\u0000\u00e0R\u00e2S\u00e4T\u00e6\u0000\u00e8\u0000"+ + "\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4U\u00f6"+ + "V\u00f8W\u00faX\u00fc\u0000\u00fe\u0000\u0100\u0000\u0102\u0000\u0104"+ + "\u0000\u0106\u0000\u0108Y\u010a\u0000\u010cZ\u010e[\u0110\\\u0112\u0000"+ + "\u0114\u0000\u0116]\u0118^\u011a\u0000\u011c_\u011e\u0000\u0120`\u0122"+ + "a\u0124b\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ + "\u0000\u0132\u0000\u0134\u0000\u0136\u0000\u0138c\u013ad\u013ce\u013e"+ + "\u0000\u0140\u0000\u0142\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a"+ + "f\u014cg\u014eh\u0150\u0000\u0152i\u0154j\u0156k\u0158l\u015a\u0000\u015c"+ + "\u0000\u015em\u0160n\u0162o\u0164p\u0166\u0000\u0168\u0000\u016a\u0000"+ + "\u016c\u0000\u016e\u0000\u0170\u0000\u0172\u0000\u0174q\u0176r\u0178s"+ + "\u017a\u0000\u017c\u0000\u017e\u0000\u0180\u0000\u0182t\u0184u\u0186v"+ + "\u0188\u0000\u018a\u0000\u018c\u0000\u018e\u0000\u0190w\u0192\u0000\u0194"+ + "\u0000\u0196x\u0198y\u019az\u019c\u0000\u019e\u0000\u01a0\u0000\u01a2"+ + "{\u01a4|\u01a6}\u01a8\u0000\u01aa\u0000\u01ac~\u01ae\u007f\u01b0\u0080"+ + "\u01b2\u0000\u01b4\u0000\u01b6\u0000\u01b8\u0000\u0010\u0000\u0001\u0002"+ + "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000"+ + "DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ + "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ + "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ + "KKkk\u0002\u0000WWww\u0002\u0000UUuu\u0002\u0000JJjj\u0006\u0000\t\n\r"+ + "\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002"+ + 
"\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ + "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ + "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ + "//::<<>?\\\\||\u065c\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000"+ + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000"+ + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000,"+ + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000"+ + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000"+ + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:"+ + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000"+ + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000"+ + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0001H"+ + "\u0001\u0000\u0000\u0000\u0001^\u0001\u0000\u0000\u0000\u0001`\u0001\u0000"+ + "\u0000\u0000\u0001b\u0001\u0000\u0000\u0000\u0001d\u0001\u0000\u0000\u0000"+ + "\u0001f\u0001\u0000\u0000\u0000\u0001h\u0001\u0000\u0000\u0000\u0001j"+ + "\u0001\u0000\u0000\u0000\u0001l\u0001\u0000\u0000\u0000\u0001n\u0001\u0000"+ + "\u0000\u0000\u0001p\u0001\u0000\u0000\u0000\u0001r\u0001\u0000\u0000\u0000"+ + "\u0001t\u0001\u0000\u0000\u0000\u0001v\u0001\u0000\u0000\u0000\u0001x"+ + "\u0001\u0000\u0000\u0000\u0001z\u0001\u0000\u0000\u0000\u0001|\u0001\u0000"+ + "\u0000\u0000\u0001~\u0001\u0000\u0000\u0000\u0001\u0080\u0001\u0000\u0000"+ + "\u0000\u0001\u0082\u0001\u0000\u0000\u0000\u0001\u0084\u0001\u0000\u0000"+ + "\u0000\u0001\u0086\u0001\u0000\u0000\u0000\u0001\u0088\u0001\u0000\u0000"+ + "\u0000\u0001\u008a\u0001\u0000\u0000\u0000\u0001\u008c\u0001\u0000\u0000"+ + "\u0000\u0001\u008e\u0001\u0000\u0000\u0000\u0001\u0090\u0001\u0000\u0000"+ + "\u0000\u0001\u0092\u0001\u0000\u0000\u0000\u0001\u0094\u0001\u0000\u0000"+ + "\u0000\u0001\u0096\u0001\u0000\u0000\u0000\u0001\u0098\u0001\u0000\u0000"+ + "\u0000\u0001\u009a\u0001\u0000\u0000\u0000\u0001\u009c\u0001\u0000\u0000"+ + "\u0000\u0001\u009e\u0001\u0000\u0000\u0000\u0001\u00a0\u0001\u0000\u0000"+ + "\u0000\u0001\u00a2\u0001\u0000\u0000\u0000\u0001\u00a4\u0001\u0000\u0000"+ + "\u0000\u0001\u00a6\u0001\u0000\u0000\u0000\u0001\u00a8\u0001\u0000\u0000"+ + "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ + "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ + "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ + "\u0000\u0001\u00b8\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000"+ + "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ + "\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000"+ + "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ + "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000"+ + "\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ + "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ + "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ + 
"\u0000\u0003\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000"+ + "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ + "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ + "\u0000\u0004\u00e6\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000"+ + "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ + "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f4\u0001\u0000\u0000"+ + "\u0000\u0004\u00f6\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ + "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000"+ + "\u0000\u0005\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ + "\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ + "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ + "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ + "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ + "\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000"+ + "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ + "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + "\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000"+ + "\u0000\u0007\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ + "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ + "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ + "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ + "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ + "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\b\u013e\u0001\u0000\u0000\u0000"+ + "\b\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144"+ + "\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001"+ + "\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000"+ + "\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\t\u0150\u0001\u0000\u0000"+ + "\u0000\t\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000"+ + "\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\n\u015a"+ + "\u0001\u0000\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001"+ + "\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000"+ + "\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\u000b\u0166\u0001\u0000\u0000"+ + "\u0000\u000b\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ + "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ + "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ + "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ + "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\f\u017a\u0001\u0000\u0000\u0000"+ + "\f\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180"+ + "\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001"+ + "\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\r\u0188\u0001\u0000"+ + "\u0000\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000"+ + "\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000"+ + "\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196"+ + "\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001"+ + 
"\u0000\u0000\u0000\u000e\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001"+ + "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ + "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ + "\u0000\u0000\u0000\u000f\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001"+ + "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ + "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ + "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ + "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u0010\u01ba\u0001"+ + "\u0000\u0000\u0000\u0012\u01c4\u0001\u0000\u0000\u0000\u0014\u01cb\u0001"+ + "\u0000\u0000\u0000\u0016\u01d4\u0001\u0000\u0000\u0000\u0018\u01db\u0001"+ + "\u0000\u0000\u0000\u001a\u01e5\u0001\u0000\u0000\u0000\u001c\u01ec\u0001"+ + "\u0000\u0000\u0000\u001e\u01f3\u0001\u0000\u0000\u0000 \u01fa\u0001\u0000"+ + "\u0000\u0000\"\u0202\u0001\u0000\u0000\u0000$\u020e\u0001\u0000\u0000"+ + "\u0000&\u0217\u0001\u0000\u0000\u0000(\u021d\u0001\u0000\u0000\u0000*"+ + "\u0224\u0001\u0000\u0000\u0000,\u022b\u0001\u0000\u0000\u0000.\u0233\u0001"+ + "\u0000\u0000\u00000\u023b\u0001\u0000\u0000\u00002\u024a\u0001\u0000\u0000"+ + "\u00004\u0256\u0001\u0000\u0000\u00006\u0261\u0001\u0000\u0000\u00008"+ + "\u0269\u0001\u0000\u0000\u0000:\u0271\u0001\u0000\u0000\u0000<\u0279\u0001"+ + "\u0000\u0000\u0000>\u0282\u0001\u0000\u0000\u0000@\u028d\u0001\u0000\u0000"+ + "\u0000B\u0293\u0001\u0000\u0000\u0000D\u02a4\u0001\u0000\u0000\u0000F"+ + "\u02b4\u0001\u0000\u0000\u0000H\u02ba\u0001\u0000\u0000\u0000J\u02be\u0001"+ + "\u0000\u0000\u0000L\u02c0\u0001\u0000\u0000\u0000N\u02c2\u0001\u0000\u0000"+ + "\u0000P\u02c5\u0001\u0000\u0000\u0000R\u02c7\u0001\u0000\u0000\u0000T"+ + "\u02d0\u0001\u0000\u0000\u0000V\u02d2\u0001\u0000\u0000\u0000X\u02d7\u0001"+ + "\u0000\u0000\u0000Z\u02d9\u0001\u0000\u0000\u0000\\\u02de\u0001\u0000"+ + "\u0000\u0000^\u02fd\u0001\u0000\u0000\u0000`\u0300\u0001\u0000\u0000\u0000"+ + "b\u032e\u0001\u0000\u0000\u0000d\u0330\u0001\u0000\u0000\u0000f\u0333"+ + "\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000j\u033b\u0001\u0000"+ + "\u0000\u0000l\u033d\u0001\u0000\u0000\u0000n\u0340\u0001\u0000\u0000\u0000"+ + "p\u0342\u0001\u0000\u0000\u0000r\u0344\u0001\u0000\u0000\u0000t\u0349"+ + "\u0001\u0000\u0000\u0000v\u034b\u0001\u0000\u0000\u0000x\u0351\u0001\u0000"+ + "\u0000\u0000z\u0357\u0001\u0000\u0000\u0000|\u035a\u0001\u0000\u0000\u0000"+ + "~\u035d\u0001\u0000\u0000\u0000\u0080\u0362\u0001\u0000\u0000\u0000\u0082"+ + "\u0367\u0001\u0000\u0000\u0000\u0084\u0369\u0001\u0000\u0000\u0000\u0086"+ + "\u036d\u0001\u0000\u0000\u0000\u0088\u0372\u0001\u0000\u0000\u0000\u008a"+ + "\u0378\u0001\u0000\u0000\u0000\u008c\u037b\u0001\u0000\u0000\u0000\u008e"+ + "\u037d\u0001\u0000\u0000\u0000\u0090\u0383\u0001\u0000\u0000\u0000\u0092"+ + "\u0385\u0001\u0000\u0000\u0000\u0094\u038a\u0001\u0000\u0000\u0000\u0096"+ + "\u038d\u0001\u0000\u0000\u0000\u0098\u0390\u0001\u0000\u0000\u0000\u009a"+ + "\u0393\u0001\u0000\u0000\u0000\u009c\u0395\u0001\u0000\u0000\u0000\u009e"+ + "\u0398\u0001\u0000\u0000\u0000\u00a0\u039a\u0001\u0000\u0000\u0000\u00a2"+ + "\u039d\u0001\u0000\u0000\u0000\u00a4\u039f\u0001\u0000\u0000\u0000\u00a6"+ + "\u03a1\u0001\u0000\u0000\u0000\u00a8\u03a3\u0001\u0000\u0000\u0000\u00aa"+ + "\u03a5\u0001\u0000\u0000\u0000\u00ac\u03a7\u0001\u0000\u0000\u0000\u00ae"+ + "\u03bc\u0001\u0000\u0000\u0000\u00b0\u03be\u0001\u0000\u0000\u0000\u00b2"+ + 
"\u03c3\u0001\u0000\u0000\u0000\u00b4\u03d8\u0001\u0000\u0000\u0000\u00b6"+ + "\u03da\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ + "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03e8\u0001\u0000\u0000\u0000\u00be"+ + "\u03ec\u0001\u0000\u0000\u0000\u00c0\u03f0\u0001\u0000\u0000\u0000\u00c2"+ + "\u03f5\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ + "\u03fe\u0001\u0000\u0000\u0000\u00c8\u0402\u0001\u0000\u0000\u0000\u00ca"+ + "\u0406\u0001\u0000\u0000\u0000\u00cc\u040b\u0001\u0000\u0000\u0000\u00ce"+ + "\u040f\u0001\u0000\u0000\u0000\u00d0\u0413\u0001\u0000\u0000\u0000\u00d2"+ + "\u0417\u0001\u0000\u0000\u0000\u00d4\u041b\u0001\u0000\u0000\u0000\u00d6"+ + "\u041f\u0001\u0000\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da"+ + "\u042e\u0001\u0000\u0000\u0000\u00dc\u0432\u0001\u0000\u0000\u0000\u00de"+ + "\u0436\u0001\u0000\u0000\u0000\u00e0\u043a\u0001\u0000\u0000\u0000\u00e2"+ + "\u043e\u0001\u0000\u0000\u0000\u00e4\u0442\u0001\u0000\u0000\u0000\u00e6"+ + "\u0446\u0001\u0000\u0000\u0000\u00e8\u044b\u0001\u0000\u0000\u0000\u00ea"+ + "\u044f\u0001\u0000\u0000\u0000\u00ec\u0453\u0001\u0000\u0000\u0000\u00ee"+ + "\u0458\u0001\u0000\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2"+ + "\u0476\u0001\u0000\u0000\u0000\u00f4\u047a\u0001\u0000\u0000\u0000\u00f6"+ + "\u047e\u0001\u0000\u0000\u0000\u00f8\u0482\u0001\u0000\u0000\u0000\u00fa"+ + "\u0486\u0001\u0000\u0000\u0000\u00fc\u048a\u0001\u0000\u0000\u0000\u00fe"+ + "\u048f\u0001\u0000\u0000\u0000\u0100\u0493\u0001\u0000\u0000\u0000\u0102"+ + "\u0497\u0001\u0000\u0000\u0000\u0104\u049b\u0001\u0000\u0000\u0000\u0106"+ + "\u04a0\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ + "\u04a8\u0001\u0000\u0000\u0000\u010c\u04ac\u0001\u0000\u0000\u0000\u010e"+ + "\u04b0\u0001\u0000\u0000\u0000\u0110\u04b4\u0001\u0000\u0000\u0000\u0112"+ + "\u04b8\u0001\u0000\u0000\u0000\u0114\u04bd\u0001\u0000\u0000\u0000\u0116"+ + "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ + "\u04ce\u0001\u0000\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e"+ + "\u04de\u0001\u0000\u0000\u0000\u0120\u04e2\u0001\u0000\u0000\u0000\u0122"+ + "\u04e6\u0001\u0000\u0000\u0000\u0124\u04ea\u0001\u0000\u0000\u0000\u0126"+ + "\u04ee\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ + "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fc\u0001\u0000\u0000\u0000\u012e"+ + "\u0500\u0001\u0000\u0000\u0000\u0130\u0504\u0001\u0000\u0000\u0000\u0132"+ + "\u0508\u0001\u0000\u0000\u0000\u0134\u050c\u0001\u0000\u0000\u0000\u0136"+ + "\u0511\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ + "\u051a\u0001\u0000\u0000\u0000\u013c\u051e\u0001\u0000\u0000\u0000\u013e"+ + "\u0522\u0001\u0000\u0000\u0000\u0140\u0527\u0001\u0000\u0000\u0000\u0142"+ + "\u052b\u0001\u0000\u0000\u0000\u0144\u0530\u0001\u0000\u0000\u0000\u0146"+ + "\u0535\u0001\u0000\u0000\u0000\u0148\u0539\u0001\u0000\u0000\u0000\u014a"+ + "\u053d\u0001\u0000\u0000\u0000\u014c\u0541\u0001\u0000\u0000\u0000\u014e"+ + "\u0545\u0001\u0000\u0000\u0000\u0150\u0549\u0001\u0000\u0000\u0000\u0152"+ + "\u054e\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ + "\u0557\u0001\u0000\u0000\u0000\u0158\u055b\u0001\u0000\u0000\u0000\u015a"+ + "\u055f\u0001\u0000\u0000\u0000\u015c\u0564\u0001\u0000\u0000\u0000\u015e"+ + "\u056d\u0001\u0000\u0000\u0000\u0160\u0571\u0001\u0000\u0000\u0000\u0162"+ + "\u0575\u0001\u0000\u0000\u0000\u0164\u0579\u0001\u0000\u0000\u0000\u0166"+ + 
"\u057d\u0001\u0000\u0000\u0000\u0168\u0582\u0001\u0000\u0000\u0000\u016a"+ + "\u0586\u0001\u0000\u0000\u0000\u016c\u058a\u0001\u0000\u0000\u0000\u016e"+ + "\u058e\u0001\u0000\u0000\u0000\u0170\u0593\u0001\u0000\u0000\u0000\u0172"+ + "\u0597\u0001\u0000\u0000\u0000\u0174\u059b\u0001\u0000\u0000\u0000\u0176"+ + "\u059f\u0001\u0000\u0000\u0000\u0178\u05a3\u0001\u0000\u0000\u0000\u017a"+ + "\u05a7\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ + "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b5\u0001\u0000\u0000\u0000\u0182"+ + "\u05b9\u0001\u0000\u0000\u0000\u0184\u05bd\u0001\u0000\u0000\u0000\u0186"+ + "\u05c1\u0001\u0000\u0000\u0000\u0188\u05c5\u0001\u0000\u0000\u0000\u018a"+ + "\u05ca\u0001\u0000\u0000\u0000\u018c\u05ce\u0001\u0000\u0000\u0000\u018e"+ + "\u05d2\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ + "\u05e1\u0001\u0000\u0000\u0000\u0194\u05e5\u0001\u0000\u0000\u0000\u0196"+ + "\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001\u0000\u0000\u0000\u019a"+ + "\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001\u0000\u0000\u0000\u019e"+ + "\u05fa\u0001\u0000\u0000\u0000\u01a0\u0600\u0001\u0000\u0000\u0000\u01a2"+ + "\u0606\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ + "\u060e\u0001\u0000\u0000\u0000\u01a8\u0612\u0001\u0000\u0000\u0000\u01aa"+ + "\u0618\u0001\u0000\u0000\u0000\u01ac\u061e\u0001\u0000\u0000\u0000\u01ae"+ + "\u0622\u0001\u0000\u0000\u0000\u01b0\u0626\u0001\u0000\u0000\u0000\u01b2"+ + "\u062a\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ + "\u0636\u0001\u0000\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba"+ + "\u01bb\u0007\u0000\u0000\u0000\u01bb\u01bc\u0007\u0001\u0000\u0000\u01bc"+ + "\u01bd\u0007\u0002\u0000\u0000\u01bd\u01be\u0007\u0002\u0000\u0000\u01be"+ + "\u01bf\u0007\u0003\u0000\u0000\u01bf\u01c0\u0007\u0004\u0000\u0000\u01c0"+ + "\u01c1\u0007\u0005\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ + "\u01c3\u0006\u0000\u0000\u0000\u01c3\u0011\u0001\u0000\u0000\u0000\u01c4"+ + "\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0006\u0000\u0000\u01c6"+ + "\u01c7\u0007\u0007\u0000\u0000\u01c7\u01c8\u0007\b\u0000\u0000\u01c8\u01c9"+ + "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006\u0001\u0001\u0000\u01ca\u0013"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007\u0003\u0000\u0000\u01cc\u01cd"+ + "\u0007\t\u0000\u0000\u01cd\u01ce\u0007\u0006\u0000\u0000\u01ce\u01cf\u0007"+ + "\u0001\u0000\u0000\u01cf\u01d0\u0007\u0004\u0000\u0000\u01d0\u01d1\u0007"+ + "\n\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0002"+ + "\u0002\u0000\u01d3\u0015\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0003"+ + "\u0000\u0000\u01d5\u01d6\u0007\u000b\u0000\u0000\u01d6\u01d7\u0007\f\u0000"+ + "\u0000\u01d7\u01d8\u0007\r\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ + "\u01d9\u01da\u0006\u0003\u0000\u0000\u01da\u0017\u0001\u0000\u0000\u0000"+ + "\u01db\u01dc\u0007\u0003\u0000\u0000\u01dc\u01dd\u0007\u000e\u0000\u0000"+ + "\u01dd\u01de\u0007\b\u0000\u0000\u01de\u01df\u0007\r\u0000\u0000\u01df"+ + "\u01e0\u0007\f\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2"+ + "\u0007\t\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ + "\u0004\u0003\u0000\u01e4\u0019\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007"+ + "\u000f\u0000\u0000\u01e6\u01e7\u0007\u0006\u0000\u0000\u01e7\u01e8\u0007"+ + "\u0007\u0000\u0000\u01e8\u01e9\u0007\u0010\u0000\u0000\u01e9\u01ea\u0001"+ + "\u0000\u0000\u0000\u01ea\u01eb\u0006\u0005\u0004\u0000\u01eb\u001b\u0001"+ + 
"\u0000\u0000\u0000\u01ec\u01ed\u0007\u0011\u0000\u0000\u01ed\u01ee\u0007"+ + "\u0006\u0000\u0000\u01ee\u01ef\u0007\u0007\u0000\u0000\u01ef\u01f0\u0007"+ + "\u0012\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006"+ + "\u0006\u0000\u0000\u01f2\u001d\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007"+ + "\u0012\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000\u01f5\u01f6\u0007"+ + "\u0003\u0000\u0000\u01f6\u01f7\u0007\b\u0000\u0000\u01f7\u01f8\u0001\u0000"+ + "\u0000\u0000\u01f8\u01f9\u0006\u0007\u0001\u0000\u01f9\u001f\u0001\u0000"+ + "\u0000\u0000\u01fa\u01fb\u0007\r\u0000\u0000\u01fb\u01fc\u0007\u0001\u0000"+ + "\u0000\u01fc\u01fd\u0007\u0010\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000"+ + "\u0000\u01fe\u01ff\u0007\u0005\u0000\u0000\u01ff\u0200\u0001\u0000\u0000"+ + "\u0000\u0200\u0201\u0006\b\u0000\u0000\u0201!\u0001\u0000\u0000\u0000"+ + "\u0202\u0203\u0007\u0010\u0000\u0000\u0203\u0204\u0007\u000b\u0000\u0000"+ + "\u0204\u0205\u0005_\u0000\u0000\u0205\u0206\u0007\u0003\u0000\u0000\u0206"+ + "\u0207\u0007\u000e\u0000\u0000\u0207\u0208\u0007\b\u0000\u0000\u0208\u0209"+ + "\u0007\f\u0000\u0000\u0209\u020a\u0007\t\u0000\u0000\u020a\u020b\u0007"+ + "\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0006"+ + "\t\u0005\u0000\u020d#\u0001\u0000\u0000\u0000\u020e\u020f\u0007\u0006"+ + "\u0000\u0000\u020f\u0210\u0007\u0003\u0000\u0000\u0210\u0211\u0007\t\u0000"+ + "\u0000\u0211\u0212\u0007\f\u0000\u0000\u0212\u0213\u0007\u0010\u0000\u0000"+ + "\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000"+ + "\u0215\u0216\u0006\n\u0006\u0000\u0216%\u0001\u0000\u0000\u0000\u0217"+ + "\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0007\u0000\u0000\u0219"+ + "\u021a\u0007\u0013\u0000\u0000\u021a\u021b\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0006\u000b\u0000\u0000\u021c\'\u0001\u0000\u0000\u0000\u021d\u021e"+ + "\u0007\u0002\u0000\u0000\u021e\u021f\u0007\n\u0000\u0000\u021f\u0220\u0007"+ + "\u0007\u0000\u0000\u0220\u0221\u0007\u0013\u0000\u0000\u0221\u0222\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0006\f\u0007\u0000\u0223)\u0001\u0000"+ + "\u0000\u0000\u0224\u0225\u0007\u0002\u0000\u0000\u0225\u0226\u0007\u0007"+ + "\u0000\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227\u0228\u0007\u0005"+ + "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006\r\u0000"+ + "\u0000\u022a+\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0002\u0000\u0000"+ + "\u022c\u022d\u0007\u0005\u0000\u0000\u022d\u022e\u0007\f\u0000\u0000\u022e"+ + "\u022f\u0007\u0005\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000\u0230"+ + "\u0231\u0001\u0000\u0000\u0000\u0231\u0232\u0006\u000e\u0000\u0000\u0232"+ + "-\u0001\u0000\u0000\u0000\u0233\u0234\u0007\u0013\u0000\u0000\u0234\u0235"+ + "\u0007\n\u0000\u0000\u0235\u0236\u0007\u0003\u0000\u0000\u0236\u0237\u0007"+ + "\u0006\u0000\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0006\u000f\u0000\u0000\u023a/\u0001\u0000"+ + "\u0000\u0000\u023b\u023c\u0004\u0010\u0000\u0000\u023c\u023d\u0007\u0001"+ + "\u0000\u0000\u023d\u023e\u0007\t\u0000\u0000\u023e\u023f\u0007\r\u0000"+ + "\u0000\u023f\u0240\u0007\u0001\u0000\u0000\u0240\u0241\u0007\t\u0000\u0000"+ + "\u0241\u0242\u0007\u0003\u0000\u0000\u0242\u0243\u0007\u0002\u0000\u0000"+ + "\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245\u0007\f\u0000\u0000\u0245"+ + "\u0246\u0007\u0005\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000\u0247"+ + "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\u0010\u0000\u0000\u0249"+ + 
"1\u0001\u0000\u0000\u0000\u024a\u024b\u0004\u0011\u0001\u0000\u024b\u024c"+ + "\u0007\r\u0000\u0000\u024c\u024d\u0007\u0007\u0000\u0000\u024d\u024e\u0007"+ + "\u0007\u0000\u0000\u024e\u024f\u0007\u0012\u0000\u0000\u024f\u0250\u0007"+ + "\u0014\u0000\u0000\u0250\u0251\u0007\b\u0000\u0000\u0251\u0252\u0005_"+ + "\u0000\u0000\u0252\u0253\u0005\u8001\uf414\u0000\u0000\u0253\u0254\u0001"+ + "\u0000\u0000\u0000\u0254\u0255\u0006\u0011\b\u0000\u02553\u0001\u0000"+ + "\u0000\u0000\u0256\u0257\u0004\u0012\u0002\u0000\u0257\u0258\u0007\u0010"+ + "\u0000\u0000\u0258\u0259\u0007\u0003\u0000\u0000\u0259\u025a\u0007\u0005"+ + "\u0000\u0000\u025a\u025b\u0007\u0006\u0000\u0000\u025b\u025c\u0007\u0001"+ + "\u0000\u0000\u025c\u025d\u0007\u0004\u0000\u0000\u025d\u025e\u0007\u0002"+ + "\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0012"+ + "\t\u0000\u02605\u0001\u0000\u0000\u0000\u0261\u0262\u0004\u0013\u0003"+ + "\u0000\u0262\u0263\u0007\u0015\u0000\u0000\u0263\u0264\u0007\u0007\u0000"+ + "\u0000\u0264\u0265\u0007\u0001\u0000\u0000\u0265\u0266\u0007\t\u0000\u0000"+ + "\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0013\n\u0000\u0268"+ + "7\u0001\u0000\u0000\u0000\u0269\u026a\u0004\u0014\u0004\u0000\u026a\u026b"+ + "\u0007\u000f\u0000\u0000\u026b\u026c\u0007\u0014\u0000\u0000\u026c\u026d"+ + "\u0007\r\u0000\u0000\u026d\u026e\u0007\r\u0000\u0000\u026e\u026f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0006\u0014\n\u0000\u02709\u0001\u0000"+ + "\u0000\u0000\u0271\u0272\u0004\u0015\u0005\u0000\u0272\u0273\u0007\r\u0000"+ + "\u0000\u0273\u0274\u0007\u0003\u0000\u0000\u0274\u0275\u0007\u000f\u0000"+ + "\u0000\u0275\u0276\u0007\u0005\u0000\u0000\u0276\u0277\u0001\u0000\u0000"+ + "\u0000\u0277\u0278\u0006\u0015\n\u0000\u0278;\u0001\u0000\u0000\u0000"+ + "\u0279\u027a\u0004\u0016\u0006\u0000\u027a\u027b\u0007\u0006\u0000\u0000"+ + "\u027b\u027c\u0007\u0001\u0000\u0000\u027c\u027d\u0007\u0011\u0000\u0000"+ + "\u027d\u027e\u0007\n\u0000\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f"+ + "\u0280\u0001\u0000\u0000\u0000\u0280\u0281\u0006\u0016\n\u0000\u0281="+ + "\u0001\u0000\u0000\u0000\u0282\u0283\u0004\u0017\u0007\u0000\u0283\u0284"+ + "\u0007\r\u0000\u0000\u0284\u0285\u0007\u0007\u0000\u0000\u0285\u0286\u0007"+ + "\u0007\u0000\u0000\u0286\u0287\u0007\u0012\u0000\u0000\u0287\u0288\u0007"+ + "\u0014\u0000\u0000\u0288\u0289\u0007\b\u0000\u0000\u0289\u028a\u0001\u0000"+ + "\u0000\u0000\u028a\u028b\u0006\u0017\n\u0000\u028b?\u0001\u0000\u0000"+ + "\u0000\u028c\u028e\b\u0016\u0000\u0000\u028d\u028c\u0001\u0000\u0000\u0000"+ + "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000"+ + "\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ + "\u0291\u0292\u0006\u0018\u0000\u0000\u0292A\u0001\u0000\u0000\u0000\u0293"+ + "\u0294\u0005/\u0000\u0000\u0294\u0295\u0005/\u0000\u0000\u0295\u0299\u0001"+ + "\u0000\u0000\u0000\u0296\u0298\b\u0017\u0000\u0000\u0297\u0296\u0001\u0000"+ + "\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000"+ + "\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029d\u0001\u0000"+ + "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u029e\u0005\r\u0000"+ + "\u0000\u029d\u029c\u0001\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000"+ + "\u0000\u029e\u02a0\u0001\u0000\u0000\u0000\u029f\u02a1\u0005\n\u0000\u0000"+ + "\u02a0\u029f\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000"+ + "\u02a1\u02a2\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006\u0019\u000b\u0000"+ + 
"\u02a3C\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005/\u0000\u0000\u02a5\u02a6"+ + "\u0005*\u0000\u0000\u02a6\u02ab\u0001\u0000\u0000\u0000\u02a7\u02aa\u0003"+ + "D\u001a\u0000\u02a8\u02aa\t\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000"+ + "\u0000\u0000\u02a9\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ad\u0001\u0000"+ + "\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000"+ + "\u0000\u0000\u02ac\u02ae\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ + "\u0000\u0000\u02ae\u02af\u0005*\u0000\u0000\u02af\u02b0\u0005/\u0000\u0000"+ + "\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b2\u0006\u001a\u000b\u0000"+ + "\u02b2E\u0001\u0000\u0000\u0000\u02b3\u02b5\u0007\u0018\u0000\u0000\u02b4"+ + "\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6"+ + "\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7"+ + "\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b9\u0006\u001b\u000b\u0000\u02b9"+ + "G\u0001\u0000\u0000\u0000\u02ba\u02bb\u0005|\u0000\u0000\u02bb\u02bc\u0001"+ + "\u0000\u0000\u0000\u02bc\u02bd\u0006\u001c\f\u0000\u02bdI\u0001\u0000"+ + "\u0000\u0000\u02be\u02bf\u0007\u0019\u0000\u0000\u02bfK\u0001\u0000\u0000"+ + "\u0000\u02c0\u02c1\u0007\u001a\u0000\u0000\u02c1M\u0001\u0000\u0000\u0000"+ + "\u02c2\u02c3\u0005\\\u0000\u0000\u02c3\u02c4\u0007\u001b\u0000\u0000\u02c4"+ + "O\u0001\u0000\u0000\u0000\u02c5\u02c6\b\u001c\u0000\u0000\u02c6Q\u0001"+ + "\u0000\u0000\u0000\u02c7\u02c9\u0007\u0003\u0000\u0000\u02c8\u02ca\u0007"+ + "\u001d\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ + "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0003"+ + "J\u001d\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000"+ + "\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000"+ + "\u0000\u0000\u02cfS\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005@\u0000\u0000"+ + "\u02d1U\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005`\u0000\u0000\u02d3W"+ + "\u0001\u0000\u0000\u0000\u02d4\u02d8\b\u001e\u0000\u0000\u02d5\u02d6\u0005"+ + "`\u0000\u0000\u02d6\u02d8\u0005`\u0000\u0000\u02d7\u02d4\u0001\u0000\u0000"+ + "\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d8Y\u0001\u0000\u0000\u0000"+ + "\u02d9\u02da\u0005_\u0000\u0000\u02da[\u0001\u0000\u0000\u0000\u02db\u02df"+ + "\u0003L\u001e\u0000\u02dc\u02df\u0003J\u001d\u0000\u02dd\u02df\u0003Z"+ + "%\u0000\u02de\u02db\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000"+ + "\u0000\u02de\u02dd\u0001\u0000\u0000\u0000\u02df]\u0001\u0000\u0000\u0000"+ + "\u02e0\u02e5\u0005\"\u0000\u0000\u02e1\u02e4\u0003N\u001f\u0000\u02e2"+ + "\u02e4\u0003P \u0000\u02e3\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e2\u0001"+ + "\u0000\u0000\u0000\u02e4\u02e7\u0001\u0000\u0000\u0000\u02e5\u02e3\u0001"+ + "\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000\u0000\u02e6\u02e8\u0001"+ + "\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e8\u02fe\u0005"+ + "\"\u0000\u0000\u02e9\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000"+ + "\u0000\u02eb\u02ec\u0005\"\u0000\u0000\u02ec\u02f0\u0001\u0000\u0000\u0000"+ + "\u02ed\u02ef\b\u0017\u0000\u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef"+ + "\u02f2\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f0"+ + "\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f3\u0001\u0000\u0000\u0000\u02f2"+ + "\u02f0\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5"+ + "\u0005\"\u0000\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02f8\u0001"+ + "\u0000\u0000\u0000\u02f7\u02f9\u0005\"\u0000\u0000\u02f8\u02f7\u0001\u0000"+ + 
"\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fa\u02fc\u0005\"\u0000\u0000\u02fb\u02fa\u0001\u0000\u0000"+ + "\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001\u0000\u0000"+ + "\u0000\u02fd\u02e0\u0001\u0000\u0000\u0000\u02fd\u02e9\u0001\u0000\u0000"+ + "\u0000\u02fe_\u0001\u0000\u0000\u0000\u02ff\u0301\u0003J\u001d\u0000\u0300"+ + "\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ + "\u0300\u0001\u0000\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303"+ + "a\u0001\u0000\u0000\u0000\u0304\u0306\u0003J\u001d\u0000\u0305\u0304\u0001"+ + "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+ + "\u0000\u0000\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u0309\u0001"+ + "\u0000\u0000\u0000\u0309\u030d\u0003t2\u0000\u030a\u030c\u0003J\u001d"+ + "\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000"+ + "\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000"+ + "\u0000\u030e\u032f\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000"+ + "\u0000\u0310\u0312\u0003t2\u0000\u0311\u0313\u0003J\u001d\u0000\u0312"+ + "\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314"+ + "\u0312\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000\u0315"+ + "\u032f\u0001\u0000\u0000\u0000\u0316\u0318\u0003J\u001d\u0000\u0317\u0316"+ + "\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319\u0317"+ + "\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u0322"+ + "\u0001\u0000\u0000\u0000\u031b\u031f\u0003t2\u0000\u031c\u031e\u0003J"+ + "\u001d\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u0321\u0001\u0000"+ + "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ + "\u0000\u0000\u0320\u0323\u0001\u0000\u0000\u0000\u0321\u031f\u0001\u0000"+ + "\u0000\u0000\u0322\u031b\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000"+ + "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0325\u0003R!\u0000"+ + "\u0325\u032f\u0001\u0000\u0000\u0000\u0326\u0328\u0003t2\u0000\u0327\u0329"+ + "\u0003J\u001d\u0000\u0328\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001"+ + "\u0000\u0000\u0000\u032a\u0328\u0001\u0000\u0000\u0000\u032a\u032b\u0001"+ + "\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ + "R!\u0000\u032d\u032f\u0001\u0000\u0000\u0000\u032e\u0305\u0001\u0000\u0000"+ + "\u0000\u032e\u0310\u0001\u0000\u0000\u0000\u032e\u0317\u0001\u0000\u0000"+ + "\u0000\u032e\u0326\u0001\u0000\u0000\u0000\u032fc\u0001\u0000\u0000\u0000"+ + "\u0330\u0331\u0007\u001f\u0000\u0000\u0331\u0332\u0007 \u0000\u0000\u0332"+ + "e\u0001\u0000\u0000\u0000\u0333\u0334\u0007\f\u0000\u0000\u0334\u0335"+ + "\u0007\t\u0000\u0000\u0335\u0336\u0007\u0000\u0000\u0000\u0336g\u0001"+ + "\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\u0002"+ + "\u0000\u0000\u0339\u033a\u0007\u0004\u0000\u0000\u033ai\u0001\u0000\u0000"+ + "\u0000\u033b\u033c\u0005=\u0000\u0000\u033ck\u0001\u0000\u0000\u0000\u033d"+ + "\u033e\u0005:\u0000\u0000\u033e\u033f\u0005:\u0000\u0000\u033fm\u0001"+ + "\u0000\u0000\u0000\u0340\u0341\u0005:\u0000\u0000\u0341o\u0001\u0000\u0000"+ + "\u0000\u0342\u0343\u0005,\u0000\u0000\u0343q\u0001\u0000\u0000\u0000\u0344"+ + "\u0345\u0007\u0000\u0000\u0000\u0345\u0346\u0007\u0003\u0000\u0000\u0346"+ + "\u0347\u0007\u0002\u0000\u0000\u0347\u0348\u0007\u0004\u0000\u0000\u0348"+ + "s\u0001\u0000\u0000\u0000\u0349\u034a\u0005.\u0000\u0000\u034au\u0001"+ + 
"\u0000\u0000\u0000\u034b\u034c\u0007\u000f\u0000\u0000\u034c\u034d\u0007"+ + "\f\u0000\u0000\u034d\u034e\u0007\r\u0000\u0000\u034e\u034f\u0007\u0002"+ + "\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350w\u0001\u0000\u0000"+ + "\u0000\u0351\u0352\u0007\u000f\u0000\u0000\u0352\u0353\u0007\u0001\u0000"+ + "\u0000\u0353\u0354\u0007\u0006\u0000\u0000\u0354\u0355\u0007\u0002\u0000"+ + "\u0000\u0355\u0356\u0007\u0005\u0000\u0000\u0356y\u0001\u0000\u0000\u0000"+ + "\u0357\u0358\u0007\u0001\u0000\u0000\u0358\u0359\u0007\t\u0000\u0000\u0359"+ + "{\u0001\u0000\u0000\u0000\u035a\u035b\u0007\u0001\u0000\u0000\u035b\u035c"+ + "\u0007\u0002\u0000\u0000\u035c}\u0001\u0000\u0000\u0000\u035d\u035e\u0007"+ + "\r\u0000\u0000\u035e\u035f\u0007\f\u0000\u0000\u035f\u0360\u0007\u0002"+ + "\u0000\u0000\u0360\u0361\u0007\u0005\u0000\u0000\u0361\u007f\u0001\u0000"+ + "\u0000\u0000\u0362\u0363\u0007\r\u0000\u0000\u0363\u0364\u0007\u0001\u0000"+ + "\u0000\u0364\u0365\u0007\u0012\u0000\u0000\u0365\u0366\u0007\u0003\u0000"+ + "\u0000\u0366\u0081\u0001\u0000\u0000\u0000\u0367\u0368\u0005(\u0000\u0000"+ + "\u0368\u0083\u0001\u0000\u0000\u0000\u0369\u036a\u0007\t\u0000\u0000\u036a"+ + "\u036b\u0007\u0007\u0000\u0000\u036b\u036c\u0007\u0005\u0000\u0000\u036c"+ + "\u0085\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e\u036f"+ + "\u0007\u0014\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0371\u0007"+ + "\r\u0000\u0000\u0371\u0087\u0001\u0000\u0000\u0000\u0372\u0373\u0007\t"+ + "\u0000\u0000\u0373\u0374\u0007\u0014\u0000\u0000\u0374\u0375\u0007\r\u0000"+ + "\u0000\u0375\u0376\u0007\r\u0000\u0000\u0376\u0377\u0007\u0002\u0000\u0000"+ + "\u0377\u0089\u0001\u0000\u0000\u0000\u0378\u0379\u0007\u0007\u0000\u0000"+ + "\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u008b\u0001\u0000\u0000\u0000"+ + "\u037b\u037c\u0005?\u0000\u0000\u037c\u008d\u0001\u0000\u0000\u0000\u037d"+ + "\u037e\u0007\u0006\u0000\u0000\u037e\u037f\u0007\r\u0000\u0000\u037f\u0380"+ + "\u0007\u0001\u0000\u0000\u0380\u0381\u0007\u0012\u0000\u0000\u0381\u0382"+ + "\u0007\u0003\u0000\u0000\u0382\u008f\u0001\u0000\u0000\u0000\u0383\u0384"+ + "\u0005)\u0000\u0000\u0384\u0091\u0001\u0000\u0000\u0000\u0385\u0386\u0007"+ + "\u0005\u0000\u0000\u0386\u0387\u0007\u0006\u0000\u0000\u0387\u0388\u0007"+ + "\u0014\u0000\u0000\u0388\u0389\u0007\u0003\u0000\u0000\u0389\u0093\u0001"+ + "\u0000\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u038c\u0005=\u0000"+ + "\u0000\u038c\u0095\u0001\u0000\u0000\u0000\u038d\u038e\u0005=\u0000\u0000"+ + "\u038e\u038f\u0005~\u0000\u0000\u038f\u0097\u0001\u0000\u0000\u0000\u0390"+ + "\u0391\u0005!\u0000\u0000\u0391\u0392\u0005=\u0000\u0000\u0392\u0099\u0001"+ + "\u0000\u0000\u0000\u0393\u0394\u0005<\u0000\u0000\u0394\u009b\u0001\u0000"+ + "\u0000\u0000\u0395\u0396\u0005<\u0000\u0000\u0396\u0397\u0005=\u0000\u0000"+ + "\u0397\u009d\u0001\u0000\u0000\u0000\u0398\u0399\u0005>\u0000\u0000\u0399"+ + "\u009f\u0001\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u039c"+ + "\u0005=\u0000\u0000\u039c\u00a1\u0001\u0000\u0000\u0000\u039d\u039e\u0005"+ + "+\u0000\u0000\u039e\u00a3\u0001\u0000\u0000\u0000\u039f\u03a0\u0005-\u0000"+ + "\u0000\u03a0\u00a5\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005*\u0000\u0000"+ + "\u03a2\u00a7\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005/\u0000\u0000\u03a4"+ + "\u00a9\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005%\u0000\u0000\u03a6\u00ab"+ + "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003.\u000f\u0000\u03a8\u03a9\u0001"+ + "\u0000\u0000\u0000\u03a9\u03aa\u0006N\r\u0000\u03aa\u00ad\u0001\u0000"+ + 
"\u0000\u0000\u03ab\u03ae\u0003\u008c>\u0000\u03ac\u03af\u0003L\u001e\u0000"+ + "\u03ad\u03af\u0003Z%\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03ad"+ + "\u0001\u0000\u0000\u0000\u03af\u03b3\u0001\u0000\u0000\u0000\u03b0\u03b2"+ + "\u0003\\&\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b5\u0001"+ + "\u0000\u0000\u0000\u03b3\u03b1\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001"+ + "\u0000\u0000\u0000\u03b4\u03bd\u0001\u0000\u0000\u0000\u03b5\u03b3\u0001"+ + "\u0000\u0000\u0000\u03b6\u03b8\u0003\u008c>\u0000\u03b7\u03b9\u0003J\u001d"+ + "\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03ba\u0001\u0000\u0000"+ + "\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb\u0001\u0000\u0000"+ + "\u0000\u03bb\u03bd\u0001\u0000\u0000\u0000\u03bc\u03ab\u0001\u0000\u0000"+ + "\u0000\u03bc\u03b6\u0001\u0000\u0000\u0000\u03bd\u00af\u0001\u0000\u0000"+ + "\u0000\u03be\u03bf\u0005[\u0000\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c1\u0006P\u0000\u0000\u03c1\u03c2\u0006P\u0000\u0000\u03c2\u00b1"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0005]\u0000\u0000\u03c4\u03c5\u0001"+ + "\u0000\u0000\u0000\u03c5\u03c6\u0006Q\f\u0000\u03c6\u03c7\u0006Q\f\u0000"+ + "\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03cc\u0003L\u001e\u0000\u03c9"+ + "\u03cb\u0003\\&\u0000\u03ca\u03c9\u0001\u0000\u0000\u0000\u03cb\u03ce"+ + "\u0001\u0000\u0000\u0000\u03cc\u03ca\u0001\u0000\u0000\u0000\u03cc\u03cd"+ + "\u0001\u0000\u0000\u0000\u03cd\u03d9\u0001\u0000\u0000\u0000\u03ce\u03cc"+ + "\u0001\u0000\u0000\u0000\u03cf\u03d2\u0003Z%\u0000\u03d0\u03d2\u0003T"+ + "\"\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d1\u03d0\u0001\u0000\u0000"+ + "\u0000\u03d2\u03d4\u0001\u0000\u0000\u0000\u03d3\u03d5\u0003\\&\u0000"+ + "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ + "\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ + "\u03d7\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c8\u0001\u0000\u0000\u0000"+ + "\u03d8\u03d1\u0001\u0000\u0000\u0000\u03d9\u00b5\u0001\u0000\u0000\u0000"+ + "\u03da\u03dc\u0003V#\u0000\u03db\u03dd\u0003X$\u0000\u03dc\u03db\u0001"+ + "\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03dc\u0001"+ + "\u0000\u0000\u0000\u03de\u03df\u0001\u0000\u0000\u0000\u03df\u03e0\u0001"+ + "\u0000\u0000\u0000\u03e0\u03e1\u0003V#\u0000\u03e1\u00b7\u0001\u0000\u0000"+ + "\u0000\u03e2\u03e3\u0003\u00b6S\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000"+ + "\u03e4\u03e5\u0003B\u0019\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ + "\u03e7\u0006U\u000b\u0000\u03e7\u00bb\u0001\u0000\u0000\u0000\u03e8\u03e9"+ + "\u0003D\u001a\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006"+ + "V\u000b\u0000\u03eb\u00bd\u0001\u0000\u0000\u0000\u03ec\u03ed\u0003F\u001b"+ + "\u0000\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006W\u000b\u0000"+ + "\u03ef\u00bf\u0001\u0000\u0000\u0000\u03f0\u03f1\u0003\u00b0P\u0000\u03f1"+ + "\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006X\u000e\u0000\u03f3\u03f4"+ + "\u0006X\u000f\u0000\u03f4\u00c1\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ + "H\u001c\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006Y\u0010"+ + "\u0000\u03f8\u03f9\u0006Y\f\u0000\u03f9\u00c3\u0001\u0000\u0000\u0000"+ + "\u03fa\u03fb\u0003F\u001b\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ + "\u03fd\u0006Z\u000b\u0000\u03fd\u00c5\u0001\u0000\u0000\u0000\u03fe\u03ff"+ + "\u0003B\u0019\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ + "[\u000b\u0000\u0401\u00c7\u0001\u0000\u0000\u0000\u0402\u0403\u0003D\u001a"+ + 
"\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006\\\u000b\u0000"+ + "\u0405\u00c9\u0001\u0000\u0000\u0000\u0406\u0407\u0003H\u001c\u0000\u0407"+ + "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006]\u0010\u0000\u0409\u040a"+ + "\u0006]\f\u0000\u040a\u00cb\u0001\u0000\u0000\u0000\u040b\u040c\u0003"+ + "\u00b0P\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006^\u000e"+ + "\u0000\u040e\u00cd\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00b2Q\u0000"+ + "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006_\u0011\u0000\u0412"+ + "\u00cf\u0001\u0000\u0000\u0000\u0413\u0414\u0003n/\u0000\u0414\u0415\u0001"+ + "\u0000\u0000\u0000\u0415\u0416\u0006`\u0012\u0000\u0416\u00d1\u0001\u0000"+ + "\u0000\u0000\u0417\u0418\u0003p0\u0000\u0418\u0419\u0001\u0000\u0000\u0000"+ + "\u0419\u041a\u0006a\u0013\u0000\u041a\u00d3\u0001\u0000\u0000\u0000\u041b"+ + "\u041c\u0003j-\u0000\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006"+ + "b\u0014\u0000\u041e\u00d5\u0001\u0000\u0000\u0000\u041f\u0420\u0007\u0010"+ + "\u0000\u0000\u0420\u0421\u0007\u0003\u0000\u0000\u0421\u0422\u0007\u0005"+ + "\u0000\u0000\u0422\u0423\u0007\f\u0000\u0000\u0423\u0424\u0007\u0000\u0000"+ + "\u0000\u0424\u0425\u0007\f\u0000\u0000\u0425\u0426\u0007\u0005\u0000\u0000"+ + "\u0426\u0427\u0007\f\u0000\u0000\u0427\u00d7\u0001\u0000\u0000\u0000\u0428"+ + "\u042c\b!\u0000\u0000\u0429\u042a\u0005/\u0000\u0000\u042a\u042c\b\"\u0000"+ + "\u0000\u042b\u0428\u0001\u0000\u0000\u0000\u042b\u0429\u0001\u0000\u0000"+ + "\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042f\u0003\u00d8d\u0000"+ + "\u042e\u042d\u0001\u0000\u0000\u0000\u042f\u0430\u0001\u0000\u0000\u0000"+ + "\u0430\u042e\u0001\u0000\u0000\u0000\u0430\u0431\u0001\u0000\u0000\u0000"+ + "\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0433\u0003\u00dae\u0000\u0433"+ + "\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006f\u0015\u0000\u0435\u00dd"+ + "\u0001\u0000\u0000\u0000\u0436\u0437\u0003^\'\u0000\u0437\u0438\u0001"+ + "\u0000\u0000\u0000\u0438\u0439\u0006g\u0016\u0000\u0439\u00df\u0001\u0000"+ + "\u0000\u0000\u043a\u043b\u0003B\u0019\u0000\u043b\u043c\u0001\u0000\u0000"+ + "\u0000\u043c\u043d\u0006h\u000b\u0000\u043d\u00e1\u0001\u0000\u0000\u0000"+ + "\u043e\u043f\u0003D\u001a\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ + "\u0441\u0006i\u000b\u0000\u0441\u00e3\u0001\u0000\u0000\u0000\u0442\u0443"+ + "\u0003F\u001b\u0000\u0443\u0444\u0001\u0000\u0000\u0000\u0444\u0445\u0006"+ + "j\u000b\u0000\u0445\u00e5\u0001\u0000\u0000\u0000\u0446\u0447\u0003H\u001c"+ + "\u0000\u0447\u0448\u0001\u0000\u0000\u0000\u0448\u0449\u0006k\u0010\u0000"+ + "\u0449\u044a\u0006k\f\u0000\u044a\u00e7\u0001\u0000\u0000\u0000\u044b"+ + "\u044c\u0003t2\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006"+ + "l\u0017\u0000\u044e\u00e9\u0001\u0000\u0000\u0000\u044f\u0450\u0003p0"+ + "\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006m\u0013\u0000"+ + "\u0452\u00eb\u0001\u0000\u0000\u0000\u0453\u0454\u0004n\b\u0000\u0454"+ + "\u0455\u0003\u008c>\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457"+ + "\u0006n\u0018\u0000\u0457\u00ed\u0001\u0000\u0000\u0000\u0458\u0459\u0004"+ + "o\t\u0000\u0459\u045a\u0003\u00aeO\u0000\u045a\u045b\u0001\u0000\u0000"+ + "\u0000\u045b\u045c\u0006o\u0019\u0000\u045c\u00ef\u0001\u0000\u0000\u0000"+ + "\u045d\u0462\u0003L\u001e\u0000\u045e\u0462\u0003J\u001d\u0000\u045f\u0462"+ + "\u0003Z%\u0000\u0460\u0462\u0003\u00a6K\u0000\u0461\u045d\u0001\u0000"+ + "\u0000\u0000\u0461\u045e\u0001\u0000\u0000\u0000\u0461\u045f\u0001\u0000"+ + 
"\u0000\u0000\u0461\u0460\u0001\u0000\u0000\u0000\u0462\u00f1\u0001\u0000"+ + "\u0000\u0000\u0463\u0466\u0003L\u001e\u0000\u0464\u0466\u0003\u00a6K\u0000"+ + "\u0465\u0463\u0001\u0000\u0000\u0000\u0465\u0464\u0001\u0000\u0000\u0000"+ + "\u0466\u046a\u0001\u0000\u0000\u0000\u0467\u0469\u0003\u00f0p\u0000\u0468"+ + "\u0467\u0001\u0000\u0000\u0000\u0469\u046c\u0001\u0000\u0000\u0000\u046a"+ + "\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b"+ + "\u0477\u0001\u0000\u0000\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046d"+ + "\u0470\u0003Z%\u0000\u046e\u0470\u0003T\"\u0000\u046f\u046d\u0001\u0000"+ + "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0472\u0001\u0000"+ + "\u0000\u0000\u0471\u0473\u0003\u00f0p\u0000\u0472\u0471\u0001\u0000\u0000"+ + "\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0472\u0001\u0000\u0000"+ + "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0477\u0001\u0000\u0000"+ + "\u0000\u0476\u0465\u0001\u0000\u0000\u0000\u0476\u046f\u0001\u0000\u0000"+ + "\u0000\u0477\u00f3\u0001\u0000\u0000\u0000\u0478\u047b\u0003\u00f2q\u0000"+ + "\u0479\u047b\u0003\u00b6S\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047a"+ + "\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c"+ + "\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d"+ + "\u00f5\u0001\u0000\u0000\u0000\u047e\u047f\u0003B\u0019\u0000\u047f\u0480"+ + "\u0001\u0000\u0000\u0000\u0480\u0481\u0006s\u000b\u0000\u0481\u00f7\u0001"+ + "\u0000\u0000\u0000\u0482\u0483\u0003D\u001a\u0000\u0483\u0484\u0001\u0000"+ + "\u0000\u0000\u0484\u0485\u0006t\u000b\u0000\u0485\u00f9\u0001\u0000\u0000"+ + "\u0000\u0486\u0487\u0003F\u001b\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ + "\u0488\u0489\u0006u\u000b\u0000\u0489\u00fb\u0001\u0000\u0000\u0000\u048a"+ + "\u048b\u0003H\u001c\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d"+ + "\u0006v\u0010\u0000\u048d\u048e\u0006v\f\u0000\u048e\u00fd\u0001\u0000"+ + "\u0000\u0000\u048f\u0490\u0003j-\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ + "\u0491\u0492\u0006w\u0014\u0000\u0492\u00ff\u0001\u0000\u0000\u0000\u0493"+ + "\u0494\u0003p0\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ + "x\u0013\u0000\u0496\u0101\u0001\u0000\u0000\u0000\u0497\u0498\u0003t2"+ + "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006y\u0017\u0000"+ + "\u049a\u0103\u0001\u0000\u0000\u0000\u049b\u049c\u0004z\n\u0000\u049c"+ + "\u049d\u0003\u008c>\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f"+ + "\u0006z\u0018\u0000\u049f\u0105\u0001\u0000\u0000\u0000\u04a0\u04a1\u0004"+ + "{\u000b\u0000\u04a1\u04a2\u0003\u00aeO\u0000\u04a2\u04a3\u0001\u0000\u0000"+ + "\u0000\u04a3\u04a4\u0006{\u0019\u0000\u04a4\u0107\u0001\u0000\u0000\u0000"+ + "\u04a5\u04a6\u0007\f\u0000\u0000\u04a6\u04a7\u0007\u0002\u0000\u0000\u04a7"+ + "\u0109\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003\u00f4r\u0000\u04a9\u04aa"+ + "\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006}\u001a\u0000\u04ab\u010b\u0001"+ + "\u0000\u0000\u0000\u04ac\u04ad\u0003B\u0019\u0000\u04ad\u04ae\u0001\u0000"+ + "\u0000\u0000\u04ae\u04af\u0006~\u000b\u0000\u04af\u010d\u0001\u0000\u0000"+ + "\u0000\u04b0\u04b1\u0003D\u001a\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000"+ + "\u04b2\u04b3\u0006\u007f\u000b\u0000\u04b3\u010f\u0001\u0000\u0000\u0000"+ + "\u04b4\u04b5\u0003F\u001b\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ + "\u04b7\u0006\u0080\u000b\u0000\u04b7\u0111\u0001\u0000\u0000\u0000\u04b8"+ + "\u04b9\u0003H\u001c\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ + 
"\u0006\u0081\u0010\u0000\u04bb\u04bc\u0006\u0081\f\u0000\u04bc\u0113\u0001"+ + "\u0000\u0000\u0000\u04bd\u04be\u0003\u00b0P\u0000\u04be\u04bf\u0001\u0000"+ + "\u0000\u0000\u04bf\u04c0\u0006\u0082\u000e\u0000\u04c0\u04c1\u0006\u0082"+ + "\u001b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000\u04c2\u04c3\u0007\u0007"+ + "\u0000\u0000\u04c3\u04c4\u0007\t\u0000\u0000\u04c4\u04c5\u0001\u0000\u0000"+ + "\u0000\u04c5\u04c6\u0006\u0083\u001c\u0000\u04c6\u0117\u0001\u0000\u0000"+ + "\u0000\u04c7\u04c8\u0007\u0013\u0000\u0000\u04c8\u04c9\u0007\u0001\u0000"+ + "\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000\u04ca\u04cb\u0007\n\u0000\u0000"+ + "\u04cb\u04cc\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000"+ + "\u04cd\u0119\u0001\u0000\u0000\u0000\u04ce\u04cf\b#\u0000\u0000\u04cf"+ + "\u011b\u0001\u0000\u0000\u0000\u04d0\u04d2\u0003\u011a\u0085\u0000\u04d1"+ + "\u04d0\u0001\u0000\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3"+ + "\u04d1\u0001\u0000\u0000\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ + "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003n/\u0000\u04d6\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d7\u04d1\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001"+ + "\u0000\u0000\u0000\u04d8\u04da\u0001\u0000\u0000\u0000\u04d9\u04db\u0003"+ + "\u011a\u0085\u0000\u04da\u04d9\u0001\u0000\u0000\u0000\u04db\u04dc\u0001"+ + "\u0000\u0000\u0000\u04dc\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001"+ + "\u0000\u0000\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\u0003"+ + "\u011c\u0086\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1\u0006"+ + "\u0087\u001d\u0000\u04e1\u011f\u0001\u0000\u0000\u0000\u04e2\u04e3\u0003"+ + "B\u0019\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006\u0088"+ + "\u000b\u0000\u04e5\u0121\u0001\u0000\u0000\u0000\u04e6\u04e7\u0003D\u001a"+ + "\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8\u04e9\u0006\u0089\u000b"+ + "\u0000\u04e9\u0123\u0001\u0000\u0000\u0000\u04ea\u04eb\u0003F\u001b\u0000"+ + "\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed\u0006\u008a\u000b\u0000"+ + "\u04ed\u0125\u0001\u0000\u0000\u0000\u04ee\u04ef\u0003H\u001c\u0000\u04ef"+ + "\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006\u008b\u0010\u0000\u04f1"+ + "\u04f2\u0006\u008b\f\u0000\u04f2\u04f3\u0006\u008b\f\u0000\u04f3\u0127"+ + "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003j-\u0000\u04f5\u04f6\u0001\u0000"+ + "\u0000\u0000\u04f6\u04f7\u0006\u008c\u0014\u0000\u04f7\u0129\u0001\u0000"+ + "\u0000\u0000\u04f8\u04f9\u0003p0\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ + "\u04fa\u04fb\u0006\u008d\u0013\u0000\u04fb\u012b\u0001\u0000\u0000\u0000"+ + "\u04fc\u04fd\u0003t2\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff"+ + "\u0006\u008e\u0017\u0000\u04ff\u012d\u0001\u0000\u0000\u0000\u0500\u0501"+ + "\u0003\u0118\u0084\u0000\u0501\u0502\u0001\u0000\u0000\u0000\u0502\u0503"+ + "\u0006\u008f\u001e\u0000\u0503\u012f\u0001\u0000\u0000\u0000\u0504\u0505"+ + "\u0003\u00f4r\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507\u0006"+ + "\u0090\u001a\u0000\u0507\u0131\u0001\u0000\u0000\u0000\u0508\u0509\u0003"+ + "\u00b8T\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006\u0091"+ + "\u001f\u0000\u050b\u0133\u0001\u0000\u0000\u0000\u050c\u050d\u0004\u0092"+ + "\f\u0000\u050d\u050e\u0003\u008c>\u0000\u050e\u050f\u0001\u0000\u0000"+ + "\u0000\u050f\u0510\u0006\u0092\u0018\u0000\u0510\u0135\u0001\u0000\u0000"+ + "\u0000\u0511\u0512\u0004\u0093\r\u0000\u0512\u0513\u0003\u00aeO\u0000"+ + "\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u0093\u0019\u0000"+ + 
"\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0003B\u0019\u0000\u0517"+ + "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0094\u000b\u0000\u0519"+ + "\u0139\u0001\u0000\u0000\u0000\u051a\u051b\u0003D\u001a\u0000\u051b\u051c"+ + "\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u0095\u000b\u0000\u051d\u013b"+ + "\u0001\u0000\u0000\u0000\u051e\u051f\u0003F\u001b\u0000\u051f\u0520\u0001"+ + "\u0000\u0000\u0000\u0520\u0521\u0006\u0096\u000b\u0000\u0521\u013d\u0001"+ + "\u0000\u0000\u0000\u0522\u0523\u0003H\u001c\u0000\u0523\u0524\u0001\u0000"+ + "\u0000\u0000\u0524\u0525\u0006\u0097\u0010\u0000\u0525\u0526\u0006\u0097"+ + "\f\u0000\u0526\u013f\u0001\u0000\u0000\u0000\u0527\u0528\u0003t2\u0000"+ + "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u0098\u0017\u0000"+ + "\u052a\u0141\u0001\u0000\u0000\u0000\u052b\u052c\u0004\u0099\u000e\u0000"+ + "\u052c\u052d\u0003\u008c>\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e"+ + "\u052f\u0006\u0099\u0018\u0000\u052f\u0143\u0001\u0000\u0000\u0000\u0530"+ + "\u0531\u0004\u009a\u000f\u0000\u0531\u0532\u0003\u00aeO\u0000\u0532\u0533"+ + "\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0019\u0000\u0534\u0145"+ + "\u0001\u0000\u0000\u0000\u0535\u0536\u0003\u00b8T\u0000\u0536\u0537\u0001"+ + "\u0000\u0000\u0000\u0537\u0538\u0006\u009b\u001f\u0000\u0538\u0147\u0001"+ + "\u0000\u0000\u0000\u0539\u053a\u0003\u00b4R\u0000\u053a\u053b\u0001\u0000"+ + "\u0000\u0000\u053b\u053c\u0006\u009c \u0000\u053c\u0149\u0001\u0000\u0000"+ + "\u0000\u053d\u053e\u0003B\u0019\u0000\u053e\u053f\u0001\u0000\u0000\u0000"+ + "\u053f\u0540\u0006\u009d\u000b\u0000\u0540\u014b\u0001\u0000\u0000\u0000"+ + "\u0541\u0542\u0003D\u001a\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543"+ + "\u0544\u0006\u009e\u000b\u0000\u0544\u014d\u0001\u0000\u0000\u0000\u0545"+ + "\u0546\u0003F\u001b\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548"+ + "\u0006\u009f\u000b\u0000\u0548\u014f\u0001\u0000\u0000\u0000\u0549\u054a"+ + "\u0003H\u001c\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006"+ + "\u00a0\u0010\u0000\u054c\u054d\u0006\u00a0\f\u0000\u054d\u0151\u0001\u0000"+ + "\u0000\u0000\u054e\u054f\u0007\u0001\u0000\u0000\u054f\u0550\u0007\t\u0000"+ + "\u0000\u0550\u0551\u0007\u000f\u0000\u0000\u0551\u0552\u0007\u0007\u0000"+ + "\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553\u0554\u0003B\u0019\u0000"+ + "\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556\u0006\u00a2\u000b\u0000"+ + "\u0556\u0155\u0001\u0000\u0000\u0000\u0557\u0558\u0003D\u001a\u0000\u0558"+ + "\u0559\u0001\u0000\u0000\u0000\u0559\u055a\u0006\u00a3\u000b\u0000\u055a"+ + "\u0157\u0001\u0000\u0000\u0000\u055b\u055c\u0003F\u001b\u0000\u055c\u055d"+ + "\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u00a4\u000b\u0000\u055e\u0159"+ + "\u0001\u0000\u0000\u0000\u055f\u0560\u0003\u00b2Q\u0000\u0560\u0561\u0001"+ + "\u0000\u0000\u0000\u0561\u0562\u0006\u00a5\u0011\u0000\u0562\u0563\u0006"+ + "\u00a5\f\u0000\u0563\u015b\u0001\u0000\u0000\u0000\u0564\u0565\u0003n"+ + "/\u0000\u0565\u0566\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00a6\u0012"+ + "\u0000\u0567\u015d\u0001\u0000\u0000\u0000\u0568\u056e\u0003T\"\u0000"+ + "\u0569\u056e\u0003J\u001d\u0000\u056a\u056e\u0003t2\u0000\u056b\u056e"+ + "\u0003L\u001e\u0000\u056c\u056e\u0003Z%\u0000\u056d\u0568\u0001\u0000"+ + "\u0000\u0000\u056d\u0569\u0001\u0000\u0000\u0000\u056d\u056a\u0001\u0000"+ + "\u0000\u0000\u056d\u056b\u0001\u0000\u0000\u0000\u056d\u056c\u0001\u0000"+ + "\u0000\u0000\u056e\u056f\u0001\u0000\u0000\u0000\u056f\u056d\u0001\u0000"+ + 
"\u0000\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u015f\u0001\u0000"+ + "\u0000\u0000\u0571\u0572\u0003B\u0019\u0000\u0572\u0573\u0001\u0000\u0000"+ + "\u0000\u0573\u0574\u0006\u00a8\u000b\u0000\u0574\u0161\u0001\u0000\u0000"+ + "\u0000\u0575\u0576\u0003D\u001a\u0000\u0576\u0577\u0001\u0000\u0000\u0000"+ + "\u0577\u0578\u0006\u00a9\u000b\u0000\u0578\u0163\u0001\u0000\u0000\u0000"+ + "\u0579\u057a\u0003F\u001b\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ + "\u057c\u0006\u00aa\u000b\u0000\u057c\u0165\u0001\u0000\u0000\u0000\u057d"+ + "\u057e\u0003H\u001c\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580"+ + "\u0006\u00ab\u0010\u0000\u0580\u0581\u0006\u00ab\f\u0000\u0581\u0167\u0001"+ + "\u0000\u0000\u0000\u0582\u0583\u0003n/\u0000\u0583\u0584\u0001\u0000\u0000"+ + "\u0000\u0584\u0585\u0006\u00ac\u0012\u0000\u0585\u0169\u0001\u0000\u0000"+ + "\u0000\u0586\u0587\u0003p0\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ + "\u0589\u0006\u00ad\u0013\u0000\u0589\u016b\u0001\u0000\u0000\u0000\u058a"+ + "\u058b\u0003t2\u0000\u058b\u058c\u0001\u0000\u0000\u0000\u058c\u058d\u0006"+ + "\u00ae\u0017\u0000\u058d\u016d\u0001\u0000\u0000\u0000\u058e\u058f\u0003"+ + "\u0116\u0083\u0000\u058f\u0590\u0001\u0000\u0000\u0000\u0590\u0591\u0006"+ + "\u00af!\u0000\u0591\u0592\u0006\u00af\"\u0000\u0592\u016f\u0001\u0000"+ + "\u0000\u0000\u0593\u0594\u0003\u00dae\u0000\u0594\u0595\u0001\u0000\u0000"+ + "\u0000\u0595\u0596\u0006\u00b0\u0015\u0000\u0596\u0171\u0001\u0000\u0000"+ + "\u0000\u0597\u0598\u0003^\'\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ + "\u0599\u059a\u0006\u00b1\u0016\u0000\u059a\u0173\u0001\u0000\u0000\u0000"+ + "\u059b\u059c\u0003B\u0019\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ + "\u059e\u0006\u00b2\u000b\u0000\u059e\u0175\u0001\u0000\u0000\u0000\u059f"+ + "\u05a0\u0003D\u001a\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2"+ + "\u0006\u00b3\u000b\u0000\u05a2\u0177\u0001\u0000\u0000\u0000\u05a3\u05a4"+ + "\u0003F\u001b\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006"+ + "\u00b4\u000b\u0000\u05a6\u0179\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003"+ + "H\u001c\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b5"+ + "\u0010\u0000\u05aa\u05ab\u0006\u00b5\f\u0000\u05ab\u05ac\u0006\u00b5\f"+ + "\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003p0\u0000\u05ae"+ + "\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006\u00b6\u0013\u0000\u05b0"+ + "\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003t2\u0000\u05b2\u05b3\u0001"+ + "\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7\u0017\u0000\u05b4\u017f\u0001"+ + "\u0000\u0000\u0000\u05b5\u05b6\u0003\u00f4r\u0000\u05b6\u05b7\u0001\u0000"+ + "\u0000\u0000\u05b7\u05b8\u0006\u00b8\u001a\u0000\u05b8\u0181\u0001\u0000"+ + "\u0000\u0000\u05b9\u05ba\u0003B\u0019\u0000\u05ba\u05bb\u0001\u0000\u0000"+ + "\u0000\u05bb\u05bc\u0006\u00b9\u000b\u0000\u05bc\u0183\u0001\u0000\u0000"+ + "\u0000\u05bd\u05be\u0003D\u001a\u0000\u05be\u05bf\u0001\u0000\u0000\u0000"+ + "\u05bf\u05c0\u0006\u00ba\u000b\u0000\u05c0\u0185\u0001\u0000\u0000\u0000"+ + "\u05c1\u05c2\u0003F\u001b\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3"+ + "\u05c4\u0006\u00bb\u000b\u0000\u05c4\u0187\u0001\u0000\u0000\u0000\u05c5"+ + "\u05c6\u0003H\u001c\u0000\u05c6\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8"+ + "\u0006\u00bc\u0010\u0000\u05c8\u05c9\u0006\u00bc\f\u0000\u05c9\u0189\u0001"+ + "\u0000\u0000\u0000\u05ca\u05cb\u00036\u0013\u0000\u05cb\u05cc\u0001\u0000"+ + "\u0000\u0000\u05cc\u05cd\u0006\u00bd#\u0000\u05cd\u018b\u0001\u0000\u0000"+ + 
"\u0000\u05ce\u05cf\u0003\u0108|\u0000\u05cf\u05d0\u0001\u0000\u0000\u0000"+ + "\u05d0\u05d1\u0006\u00be$\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2"+ + "\u05d3\u0003\u0116\u0083\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4"+ + "\u05d5\u0006\u00bf!\u0000\u05d5\u05d6\u0006\u00bf\f\u0000\u05d6\u05d7"+ + "\u0006\u00bf\u0000\u0000\u05d7\u018f\u0001\u0000\u0000\u0000\u05d8\u05d9"+ + "\u0007\u0014\u0000\u0000\u05d9\u05da\u0007\u0002\u0000\u0000\u05da\u05db"+ + "\u0007\u0001\u0000\u0000\u05db\u05dc\u0007\t\u0000\u0000\u05dc\u05dd\u0007"+ + "\u0011\u0000\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de\u05df\u0006"+ + "\u00c0\f\u0000\u05df\u05e0\u0006\u00c0\u0000\u0000\u05e0\u0191\u0001\u0000"+ + "\u0000\u0000\u05e1\u05e2\u0003\u00b4R\u0000\u05e2\u05e3\u0001\u0000\u0000"+ + "\u0000\u05e3\u05e4\u0006\u00c1 \u0000\u05e4\u0193\u0001\u0000\u0000\u0000"+ + "\u05e5\u05e6\u0003\u00b8T\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7"+ + "\u05e8\u0006\u00c2\u001f\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9"+ + "\u05ea\u0003B\u0019\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec"+ + "\u0006\u00c3\u000b\u0000\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee"+ + "\u0003D\u001a\u0000\u05ee\u05ef\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006"+ + "\u00c4\u000b\u0000\u05f0\u0199\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003"+ + "F\u001b\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5"+ + "\u000b\u0000\u05f4\u019b\u0001\u0000\u0000\u0000\u05f5\u05f6\u0003H\u001c"+ + "\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7\u05f8\u0006\u00c6\u0010"+ + "\u0000\u05f8\u05f9\u0006\u00c6\f\u0000\u05f9\u019d\u0001\u0000\u0000\u0000"+ + "\u05fa\u05fb\u0003\u00dae\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc"+ + "\u05fd\u0006\u00c7\u0015\u0000\u05fd\u05fe\u0006\u00c7\f\u0000\u05fe\u05ff"+ + "\u0006\u00c7%\u0000\u05ff\u019f\u0001\u0000\u0000\u0000\u0600\u0601\u0003"+ + "^\'\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006\u00c8"+ + "\u0016\u0000\u0603\u0604\u0006\u00c8\f\u0000\u0604\u0605\u0006\u00c8%"+ + "\u0000\u0605\u01a1\u0001\u0000\u0000\u0000\u0606\u0607\u0003B\u0019\u0000"+ + "\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609\u0006\u00c9\u000b\u0000"+ + "\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003D\u001a\u0000\u060b"+ + "\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca\u000b\u0000\u060d"+ + "\u01a5\u0001\u0000\u0000\u0000\u060e\u060f\u0003F\u001b\u0000\u060f\u0610"+ + "\u0001\u0000\u0000\u0000\u0610\u0611\u0006\u00cb\u000b\u0000\u0611\u01a7"+ + "\u0001\u0000\u0000\u0000\u0612\u0613\u0003n/\u0000\u0613\u0614\u0001\u0000"+ + "\u0000\u0000\u0614\u0615\u0006\u00cc\u0012\u0000\u0615\u0616\u0006\u00cc"+ + "\f\u0000\u0616\u0617\u0006\u00cc\t\u0000\u0617\u01a9\u0001\u0000\u0000"+ + "\u0000\u0618\u0619\u0003p0\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a"+ + "\u061b\u0006\u00cd\u0013\u0000\u061b\u061c\u0006\u00cd\f\u0000\u061c\u061d"+ + "\u0006\u00cd\t\u0000\u061d\u01ab\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ + "B\u0019\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00ce"+ + "\u000b\u0000\u0621\u01ad\u0001\u0000\u0000\u0000\u0622\u0623\u0003D\u001a"+ + "\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624\u0625\u0006\u00cf\u000b"+ + "\u0000\u0625\u01af\u0001\u0000\u0000\u0000\u0626\u0627\u0003F\u001b\u0000"+ + "\u0627\u0628\u0001\u0000\u0000\u0000\u0628\u0629\u0006\u00d0\u000b\u0000"+ + "\u0629\u01b1\u0001\u0000\u0000\u0000\u062a\u062b\u0003\u00b8T\u0000\u062b"+ + "\u062c\u0001\u0000\u0000\u0000\u062c\u062d\u0006\u00d1\f\u0000\u062d\u062e"+ + 
"\u0006\u00d1\u0000\u0000\u062e\u062f\u0006\u00d1\u001f\u0000\u062f\u01b3"+ + "\u0001\u0000\u0000\u0000\u0630\u0631\u0003\u00b4R\u0000\u0631\u0632\u0001"+ + "\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\f\u0000\u0633\u0634\u0006\u00d2"+ + "\u0000\u0000\u0634\u0635\u0006\u00d2 \u0000\u0635\u01b5\u0001\u0000\u0000"+ + "\u0000\u0636\u0637\u0003d*\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638"+ + "\u0639\u0006\u00d3\f\u0000\u0639\u063a\u0006\u00d3\u0000\u0000\u063a\u063b"+ + "\u0006\u00d3&\u0000\u063b\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003"+ + "H\u001c\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4"+ + "\u0010\u0000\u063f\u0640\u0006\u00d4\f\u0000\u0640\u01b9\u0001\u0000\u0000"+ "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ - "\r\u000e\u000f\u0291\u029b\u029f\u02a2\u02ab\u02ad\u02b8\u02cd\u02d2\u02db"+ - "\u02e2\u02e7\u02e9\u02f4\u02fc\u02ff\u0301\u0306\u030b\u0311\u0318\u031d"+ - "\u0323\u0326\u032e\u0332\u03b5\u03ba\u03c1\u03c3\u03d3\u03d8\u03dd\u03df"+ - "\u03e5\u0432\u0437\u0468\u046c\u0471\u0476\u047b\u047d\u0481\u0483\u04da"+ - "\u04de\u04e3\u0574\u0576\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\r\u000e\u000f\u028f\u0299\u029d\u02a0\u02a9\u02ab\u02b6\u02c9\u02ce\u02d7"+ + "\u02de\u02e3\u02e5\u02f0\u02f8\u02fb\u02fd\u0302\u0307\u030d\u0314\u0319"+ + "\u031f\u0322\u032a\u032e\u03ae\u03b3\u03ba\u03bc\u03cc\u03d1\u03d6\u03d8"+ + "\u03de\u042b\u0430\u0461\u0465\u046a\u046f\u0474\u0476\u047a\u047c\u04d3"+ + "\u04d7\u04dc\u056d\u056f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ "\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ - "\u0001\u0000\u0004\u0000\u0000\u0007\u001d\u0000\u0007\u0010\u0000\u0007"+ - "F\u0000\u0005\u0000\u0000\u0007\u001e\u0000\u0007G\u0000\u0007\'\u0000"+ - "\u0007%\u0000\u0007Q\u0000\u0007\u001f\u0000\u0007)\u0000\u00075\u0000"+ - "\u0007E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000"+ - "\u0007^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007"+ - "\u0014\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007\"\u0000"; + "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007F\u0000\u0005\u0000"+ + "\u0000\u0007\u001d\u0000\u0007G\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ + "$\u0000\u0007Q\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ + "E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000\u0007"+ + "^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007\u0014"+ + "\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 71930451ad55c..50493f584fe4c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -28,7 +28,6 @@ null null null null -':' '|' null null @@ -38,6 +37,7 @@ null 'asc' '=' '::' +':' ',' 'desc' '.' 
@@ -159,7 +159,6 @@ UNKNOWN_CMD LINE_COMMENT MULTILINE_COMMENT WS -COLON PIPE QUOTED_STRING INTEGER_LITERAL @@ -169,6 +168,7 @@ AND ASC ASSIGN CAST_OP +COLON COMMA DESC DOT @@ -330,4 +330,4 @@ joinPredicate atn: -[4, 1, 128, 636, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 208, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 216, 8, 5, 10, 5, 12, 5, 219, 9, 5, 1, 6, 1, 6, 3, 6, 223, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 230, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 235, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 246, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 252, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 260, 8, 9, 10, 9, 12, 9, 263, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 273, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 278, 8, 10, 10, 10, 12, 10, 281, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 289, 8, 11, 10, 11, 12, 11, 292, 9, 11, 3, 11, 294, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 308, 8, 15, 10, 15, 12, 15, 311, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 316, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 324, 8, 17, 10, 17, 12, 17, 327, 9, 17, 1, 17, 3, 17, 330, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 335, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 345, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 351, 8, 22, 10, 22, 12, 22, 354, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 364, 8, 24, 10, 24, 12, 24, 367, 9, 24, 1, 24, 3, 24, 370, 8, 24, 1, 24, 1, 24, 3, 24, 374, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 381, 8, 26, 1, 26, 1, 26, 3, 26, 385, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 390, 8, 27, 10, 27, 12, 27, 393, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 398, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 403, 8, 29, 10, 29, 12, 29, 406, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 411, 8, 30, 10, 30, 12, 30, 414, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 419, 8, 31, 10, 31, 12, 31, 422, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 429, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 444, 8, 34, 10, 34, 12, 34, 447, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 
455, 8, 34, 10, 34, 12, 34, 458, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 466, 8, 34, 10, 34, 12, 34, 469, 9, 34, 1, 34, 1, 34, 3, 34, 473, 8, 34, 1, 35, 1, 35, 3, 35, 477, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 482, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 491, 8, 38, 10, 38, 12, 38, 494, 9, 38, 1, 39, 1, 39, 3, 39, 498, 8, 39, 1, 39, 1, 39, 3, 39, 502, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 514, 8, 42, 10, 42, 12, 42, 517, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 527, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 539, 8, 47, 10, 47, 12, 47, 542, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 552, 8, 50, 1, 51, 3, 51, 555, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 560, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 582, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 588, 8, 58, 10, 58, 12, 58, 591, 9, 58, 3, 58, 593, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 598, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 611, 8, 61, 1, 62, 3, 62, 614, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 623, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 629, 8, 64, 10, 64, 12, 64, 632, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 31, 31, 81, 81, 1, 0, 72, 73, 2, 0, 36, 36, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 661, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 207, 1, 0, 0, 0, 12, 234, 1, 0, 0, 0, 14, 236, 1, 0, 0, 0, 16, 245, 1, 0, 0, 0, 18, 251, 1, 0, 0, 0, 20, 272, 1, 0, 0, 0, 22, 282, 1, 0, 0, 0, 24, 297, 1, 0, 0, 0, 26, 299, 1, 0, 0, 0, 28, 301, 1, 0, 0, 0, 30, 304, 1, 0, 0, 0, 32, 315, 1, 0, 0, 0, 34, 319, 1, 0, 0, 0, 36, 334, 1, 0, 0, 0, 38, 338, 1, 0, 0, 0, 40, 340, 1, 0, 0, 0, 42, 344, 1, 0, 0, 0, 44, 346, 1, 0, 0, 0, 46, 355, 1, 0, 0, 0, 48, 359, 1, 0, 0, 0, 50, 375, 1, 0, 0, 0, 52, 378, 1, 0, 0, 0, 54, 386, 1, 0, 0, 0, 56, 394, 1, 0, 0, 0, 58, 399, 1, 0, 0, 0, 60, 407, 1, 0, 0, 0, 62, 415, 1, 0, 0, 0, 64, 423, 1, 0, 0, 0, 66, 428, 1, 0, 0, 0, 68, 472, 1, 0, 0, 0, 70, 476, 1, 0, 0, 0, 72, 481, 1, 0, 0, 0, 74, 483, 1, 0, 0, 0, 76, 486, 1, 0, 0, 0, 78, 495, 1, 0, 0, 0, 80, 503, 1, 0, 0, 0, 82, 506, 1, 0, 0, 0, 84, 509, 1, 0, 0, 0, 86, 518, 1, 0, 0, 0, 88, 522, 1, 0, 0, 0, 90, 528, 1, 0, 0, 0, 92, 532, 1, 0, 0, 0, 94, 535, 1, 0, 0, 0, 96, 543, 1, 0, 0, 0, 98, 547, 1, 0, 0, 0, 100, 551, 1, 0, 0, 0, 102, 554, 1, 0, 0, 0, 104, 559, 1, 0, 0, 0, 106, 563, 1, 0, 0, 0, 108, 565, 1, 0, 0, 0, 110, 567, 1, 0, 0, 0, 112, 570, 1, 0, 0, 0, 114, 574, 1, 0, 0, 0, 116, 577, 1, 0, 0, 0, 118, 597, 1, 0, 0, 0, 120, 601, 1, 0, 0, 0, 122, 606, 1, 0, 0, 0, 124, 613, 1, 0, 0, 0, 126, 619, 1, 0, 0, 0, 128, 624, 1, 0, 0, 0, 130, 633, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 30, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 
1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 208, 3, 10, 5, 8, 180, 208, 3, 16, 8, 0, 181, 208, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 208, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 208, 1, 0, 0, 0, 205, 206, 4, 5, 5, 0, 206, 208, 3, 14, 7, 0, 207, 177, 1, 0, 0, 0, 207, 180, 1, 0, 0, 0, 207, 181, 1, 0, 0, 0, 207, 182, 1, 0, 0, 0, 207, 198, 1, 0, 0, 0, 207, 205, 1, 0, 0, 0, 208, 217, 1, 0, 0, 0, 209, 210, 10, 5, 0, 0, 210, 211, 5, 35, 0, 0, 211, 216, 3, 10, 5, 6, 212, 213, 10, 4, 0, 0, 213, 214, 5, 52, 0, 0, 214, 216, 3, 10, 5, 5, 215, 209, 1, 0, 0, 0, 215, 212, 1, 0, 0, 0, 216, 219, 1, 0, 0, 0, 217, 215, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 11, 1, 0, 0, 0, 219, 217, 1, 0, 0, 0, 220, 222, 3, 16, 8, 0, 221, 223, 5, 49, 0, 0, 222, 221, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 1, 0, 0, 0, 224, 225, 5, 47, 0, 0, 225, 226, 3, 106, 53, 0, 226, 235, 1, 0, 0, 0, 227, 229, 3, 16, 8, 0, 228, 230, 5, 49, 0, 0, 229, 228, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 5, 54, 0, 0, 232, 233, 3, 106, 53, 0, 233, 235, 1, 0, 0, 0, 234, 220, 1, 0, 0, 0, 234, 227, 1, 0, 0, 0, 235, 13, 1, 0, 0, 0, 236, 237, 3, 58, 29, 0, 237, 238, 5, 29, 0, 0, 238, 239, 3, 68, 34, 0, 239, 15, 1, 0, 0, 0, 240, 246, 3, 18, 9, 0, 241, 242, 3, 18, 9, 0, 242, 243, 3, 108, 54, 0, 243, 244, 3, 18, 9, 0, 244, 246, 1, 0, 0, 0, 245, 240, 1, 0, 0, 0, 245, 241, 1, 0, 0, 0, 246, 17, 1, 0, 0, 0, 247, 248, 6, 9, -1, 0, 248, 252, 3, 20, 10, 0, 249, 250, 7, 0, 0, 0, 250, 252, 3, 18, 9, 3, 251, 247, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 261, 1, 0, 0, 0, 253, 254, 10, 2, 0, 0, 254, 255, 7, 1, 0, 0, 255, 260, 3, 18, 9, 3, 256, 257, 10, 1, 0, 0, 257, 258, 7, 0, 0, 0, 258, 260, 3, 18, 9, 2, 259, 253, 1, 0, 0, 0, 259, 256, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 19, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 265, 6, 10, -1, 
0, 265, 273, 3, 68, 34, 0, 266, 273, 3, 58, 29, 0, 267, 273, 3, 22, 11, 0, 268, 269, 5, 48, 0, 0, 269, 270, 3, 10, 5, 0, 270, 271, 5, 55, 0, 0, 271, 273, 1, 0, 0, 0, 272, 264, 1, 0, 0, 0, 272, 266, 1, 0, 0, 0, 272, 267, 1, 0, 0, 0, 272, 268, 1, 0, 0, 0, 273, 279, 1, 0, 0, 0, 274, 275, 10, 1, 0, 0, 275, 276, 5, 38, 0, 0, 276, 278, 3, 26, 13, 0, 277, 274, 1, 0, 0, 0, 278, 281, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 21, 1, 0, 0, 0, 281, 279, 1, 0, 0, 0, 282, 283, 3, 24, 12, 0, 283, 293, 5, 48, 0, 0, 284, 294, 5, 66, 0, 0, 285, 290, 3, 10, 5, 0, 286, 287, 5, 39, 0, 0, 287, 289, 3, 10, 5, 0, 288, 286, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 288, 1, 0, 0, 0, 290, 291, 1, 0, 0, 0, 291, 294, 1, 0, 0, 0, 292, 290, 1, 0, 0, 0, 293, 284, 1, 0, 0, 0, 293, 285, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 5, 55, 0, 0, 296, 23, 1, 0, 0, 0, 297, 298, 3, 72, 36, 0, 298, 25, 1, 0, 0, 0, 299, 300, 3, 64, 32, 0, 300, 27, 1, 0, 0, 0, 301, 302, 5, 12, 0, 0, 302, 303, 3, 30, 15, 0, 303, 29, 1, 0, 0, 0, 304, 309, 3, 32, 16, 0, 305, 306, 5, 39, 0, 0, 306, 308, 3, 32, 16, 0, 307, 305, 1, 0, 0, 0, 308, 311, 1, 0, 0, 0, 309, 307, 1, 0, 0, 0, 309, 310, 1, 0, 0, 0, 310, 31, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 312, 313, 3, 58, 29, 0, 313, 314, 5, 37, 0, 0, 314, 316, 1, 0, 0, 0, 315, 312, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 1, 0, 0, 0, 317, 318, 3, 10, 5, 0, 318, 33, 1, 0, 0, 0, 319, 320, 5, 6, 0, 0, 320, 325, 3, 36, 18, 0, 321, 322, 5, 39, 0, 0, 322, 324, 3, 36, 18, 0, 323, 321, 1, 0, 0, 0, 324, 327, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 326, 1, 0, 0, 0, 326, 329, 1, 0, 0, 0, 327, 325, 1, 0, 0, 0, 328, 330, 3, 42, 21, 0, 329, 328, 1, 0, 0, 0, 329, 330, 1, 0, 0, 0, 330, 35, 1, 0, 0, 0, 331, 332, 3, 38, 19, 0, 332, 333, 5, 29, 0, 0, 333, 335, 1, 0, 0, 0, 334, 331, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 1, 0, 0, 0, 336, 337, 3, 40, 20, 0, 337, 37, 1, 0, 0, 0, 338, 339, 5, 81, 0, 0, 339, 39, 1, 0, 0, 0, 340, 341, 7, 2, 0, 0, 341, 41, 1, 0, 0, 0, 342, 345, 3, 44, 22, 0, 343, 345, 3, 46, 23, 0, 344, 342, 1, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 43, 1, 0, 0, 0, 346, 347, 5, 80, 0, 0, 347, 352, 5, 81, 0, 0, 348, 349, 5, 39, 0, 0, 349, 351, 5, 81, 0, 0, 350, 348, 1, 0, 0, 0, 351, 354, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 45, 1, 0, 0, 0, 354, 352, 1, 0, 0, 0, 355, 356, 5, 70, 0, 0, 356, 357, 3, 44, 22, 0, 357, 358, 5, 71, 0, 0, 358, 47, 1, 0, 0, 0, 359, 360, 5, 19, 0, 0, 360, 365, 3, 36, 18, 0, 361, 362, 5, 39, 0, 0, 362, 364, 3, 36, 18, 0, 363, 361, 1, 0, 0, 0, 364, 367, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365, 366, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 368, 370, 3, 54, 27, 0, 369, 368, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 372, 5, 34, 0, 0, 372, 374, 3, 30, 15, 0, 373, 371, 1, 0, 0, 0, 373, 374, 1, 0, 0, 0, 374, 49, 1, 0, 0, 0, 375, 376, 5, 4, 0, 0, 376, 377, 3, 30, 15, 0, 377, 51, 1, 0, 0, 0, 378, 380, 5, 15, 0, 0, 379, 381, 3, 54, 27, 0, 380, 379, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 30, 15, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 53, 1, 0, 0, 0, 386, 391, 3, 56, 28, 0, 387, 388, 5, 39, 0, 0, 388, 390, 3, 56, 28, 0, 389, 387, 1, 0, 0, 0, 390, 393, 1, 0, 0, 0, 391, 389, 1, 0, 0, 0, 391, 392, 1, 0, 0, 0, 392, 55, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 394, 397, 3, 32, 16, 0, 395, 396, 5, 16, 0, 0, 396, 398, 3, 10, 5, 0, 397, 395, 1, 0, 0, 0, 397, 398, 1, 0, 0, 0, 398, 57, 1, 0, 0, 0, 399, 404, 3, 72, 36, 0, 400, 401, 5, 41, 0, 0, 401, 403, 3, 72, 36, 0, 
402, 400, 1, 0, 0, 0, 403, 406, 1, 0, 0, 0, 404, 402, 1, 0, 0, 0, 404, 405, 1, 0, 0, 0, 405, 59, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 407, 412, 3, 66, 33, 0, 408, 409, 5, 41, 0, 0, 409, 411, 3, 66, 33, 0, 410, 408, 1, 0, 0, 0, 411, 414, 1, 0, 0, 0, 412, 410, 1, 0, 0, 0, 412, 413, 1, 0, 0, 0, 413, 61, 1, 0, 0, 0, 414, 412, 1, 0, 0, 0, 415, 420, 3, 60, 30, 0, 416, 417, 5, 39, 0, 0, 417, 419, 3, 60, 30, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 63, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 7, 3, 0, 0, 424, 65, 1, 0, 0, 0, 425, 429, 5, 85, 0, 0, 426, 427, 4, 33, 11, 0, 427, 429, 3, 70, 35, 0, 428, 425, 1, 0, 0, 0, 428, 426, 1, 0, 0, 0, 429, 67, 1, 0, 0, 0, 430, 473, 5, 50, 0, 0, 431, 432, 3, 104, 52, 0, 432, 433, 5, 72, 0, 0, 433, 473, 1, 0, 0, 0, 434, 473, 3, 102, 51, 0, 435, 473, 3, 104, 52, 0, 436, 473, 3, 98, 49, 0, 437, 473, 3, 70, 35, 0, 438, 473, 3, 106, 53, 0, 439, 440, 5, 70, 0, 0, 440, 445, 3, 100, 50, 0, 441, 442, 5, 39, 0, 0, 442, 444, 3, 100, 50, 0, 443, 441, 1, 0, 0, 0, 444, 447, 1, 0, 0, 0, 445, 443, 1, 0, 0, 0, 445, 446, 1, 0, 0, 0, 446, 448, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 448, 449, 5, 71, 0, 0, 449, 473, 1, 0, 0, 0, 450, 451, 5, 70, 0, 0, 451, 456, 3, 98, 49, 0, 452, 453, 5, 39, 0, 0, 453, 455, 3, 98, 49, 0, 454, 452, 1, 0, 0, 0, 455, 458, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 456, 457, 1, 0, 0, 0, 457, 459, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 459, 460, 5, 71, 0, 0, 460, 473, 1, 0, 0, 0, 461, 462, 5, 70, 0, 0, 462, 467, 3, 106, 53, 0, 463, 464, 5, 39, 0, 0, 464, 466, 3, 106, 53, 0, 465, 463, 1, 0, 0, 0, 466, 469, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 470, 1, 0, 0, 0, 469, 467, 1, 0, 0, 0, 470, 471, 5, 71, 0, 0, 471, 473, 1, 0, 0, 0, 472, 430, 1, 0, 0, 0, 472, 431, 1, 0, 0, 0, 472, 434, 1, 0, 0, 0, 472, 435, 1, 0, 0, 0, 472, 436, 1, 0, 0, 0, 472, 437, 1, 0, 0, 0, 472, 438, 1, 0, 0, 0, 472, 439, 1, 0, 0, 0, 472, 450, 1, 0, 0, 0, 472, 461, 1, 0, 0, 0, 473, 69, 1, 0, 0, 0, 474, 477, 5, 53, 0, 0, 475, 477, 5, 69, 0, 0, 476, 474, 1, 0, 0, 0, 476, 475, 1, 0, 0, 0, 477, 71, 1, 0, 0, 0, 478, 482, 3, 64, 32, 0, 479, 480, 4, 36, 12, 0, 480, 482, 3, 70, 35, 0, 481, 478, 1, 0, 0, 0, 481, 479, 1, 0, 0, 0, 482, 73, 1, 0, 0, 0, 483, 484, 5, 9, 0, 0, 484, 485, 5, 32, 0, 0, 485, 75, 1, 0, 0, 0, 486, 487, 5, 14, 0, 0, 487, 492, 3, 78, 39, 0, 488, 489, 5, 39, 0, 0, 489, 491, 3, 78, 39, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 77, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 497, 3, 10, 5, 0, 496, 498, 7, 4, 0, 0, 497, 496, 1, 0, 0, 0, 497, 498, 1, 0, 0, 0, 498, 501, 1, 0, 0, 0, 499, 500, 5, 51, 0, 0, 500, 502, 7, 5, 0, 0, 501, 499, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 79, 1, 0, 0, 0, 503, 504, 5, 8, 0, 0, 504, 505, 3, 62, 31, 0, 505, 81, 1, 0, 0, 0, 506, 507, 5, 2, 0, 0, 507, 508, 3, 62, 31, 0, 508, 83, 1, 0, 0, 0, 509, 510, 5, 11, 0, 0, 510, 515, 3, 86, 43, 0, 511, 512, 5, 39, 0, 0, 512, 514, 3, 86, 43, 0, 513, 511, 1, 0, 0, 0, 514, 517, 1, 0, 0, 0, 515, 513, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 85, 1, 0, 0, 0, 517, 515, 1, 0, 0, 0, 518, 519, 3, 60, 30, 0, 519, 520, 5, 89, 0, 0, 520, 521, 3, 60, 30, 0, 521, 87, 1, 0, 0, 0, 522, 523, 5, 1, 0, 0, 523, 524, 3, 20, 10, 0, 524, 526, 3, 106, 53, 0, 525, 527, 3, 94, 47, 0, 526, 525, 1, 0, 0, 0, 526, 527, 1, 0, 0, 0, 527, 89, 1, 0, 0, 0, 528, 529, 5, 7, 0, 0, 529, 530, 3, 20, 10, 0, 530, 531, 3, 106, 53, 0, 531, 91, 1, 0, 0, 0, 532, 533, 5, 10, 0, 0, 533, 534, 3, 58, 29, 0, 534, 93, 1, 0, 0, 0, 535, 540, 3, 96, 48, 0, 536, 537, 
5, 39, 0, 0, 537, 539, 3, 96, 48, 0, 538, 536, 1, 0, 0, 0, 539, 542, 1, 0, 0, 0, 540, 538, 1, 0, 0, 0, 540, 541, 1, 0, 0, 0, 541, 95, 1, 0, 0, 0, 542, 540, 1, 0, 0, 0, 543, 544, 3, 64, 32, 0, 544, 545, 5, 37, 0, 0, 545, 546, 3, 68, 34, 0, 546, 97, 1, 0, 0, 0, 547, 548, 7, 6, 0, 0, 548, 99, 1, 0, 0, 0, 549, 552, 3, 102, 51, 0, 550, 552, 3, 104, 52, 0, 551, 549, 1, 0, 0, 0, 551, 550, 1, 0, 0, 0, 552, 101, 1, 0, 0, 0, 553, 555, 7, 0, 0, 0, 554, 553, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 557, 5, 33, 0, 0, 557, 103, 1, 0, 0, 0, 558, 560, 7, 0, 0, 0, 559, 558, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 5, 32, 0, 0, 562, 105, 1, 0, 0, 0, 563, 564, 5, 31, 0, 0, 564, 107, 1, 0, 0, 0, 565, 566, 7, 7, 0, 0, 566, 109, 1, 0, 0, 0, 567, 568, 5, 5, 0, 0, 568, 569, 3, 112, 56, 0, 569, 111, 1, 0, 0, 0, 570, 571, 5, 70, 0, 0, 571, 572, 3, 2, 1, 0, 572, 573, 5, 71, 0, 0, 573, 113, 1, 0, 0, 0, 574, 575, 5, 13, 0, 0, 575, 576, 5, 105, 0, 0, 576, 115, 1, 0, 0, 0, 577, 578, 5, 3, 0, 0, 578, 581, 5, 95, 0, 0, 579, 580, 5, 93, 0, 0, 580, 582, 3, 60, 30, 0, 581, 579, 1, 0, 0, 0, 581, 582, 1, 0, 0, 0, 582, 592, 1, 0, 0, 0, 583, 584, 5, 94, 0, 0, 584, 589, 3, 118, 59, 0, 585, 586, 5, 39, 0, 0, 586, 588, 3, 118, 59, 0, 587, 585, 1, 0, 0, 0, 588, 591, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 593, 1, 0, 0, 0, 591, 589, 1, 0, 0, 0, 592, 583, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 117, 1, 0, 0, 0, 594, 595, 3, 60, 30, 0, 595, 596, 5, 37, 0, 0, 596, 598, 1, 0, 0, 0, 597, 594, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 1, 0, 0, 0, 599, 600, 3, 60, 30, 0, 600, 119, 1, 0, 0, 0, 601, 602, 5, 18, 0, 0, 602, 603, 3, 36, 18, 0, 603, 604, 5, 93, 0, 0, 604, 605, 3, 62, 31, 0, 605, 121, 1, 0, 0, 0, 606, 607, 5, 17, 0, 0, 607, 610, 3, 54, 27, 0, 608, 609, 5, 34, 0, 0, 609, 611, 3, 30, 15, 0, 610, 608, 1, 0, 0, 0, 610, 611, 1, 0, 0, 0, 611, 123, 1, 0, 0, 0, 612, 614, 7, 8, 0, 0, 613, 612, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 5, 20, 0, 0, 616, 617, 3, 126, 63, 0, 617, 618, 3, 128, 64, 0, 618, 125, 1, 0, 0, 0, 619, 622, 3, 64, 32, 0, 620, 621, 5, 89, 0, 0, 621, 623, 3, 64, 32, 0, 622, 620, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 127, 1, 0, 0, 0, 624, 625, 5, 93, 0, 0, 625, 630, 3, 130, 65, 0, 626, 627, 5, 39, 0, 0, 627, 629, 3, 130, 65, 0, 628, 626, 1, 0, 0, 0, 629, 632, 1, 0, 0, 0, 630, 628, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 129, 1, 0, 0, 0, 632, 630, 1, 0, 0, 0, 633, 634, 3, 16, 8, 0, 634, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 207, 215, 217, 222, 229, 234, 245, 251, 259, 261, 272, 279, 290, 293, 309, 315, 325, 329, 334, 344, 352, 365, 369, 373, 380, 384, 391, 397, 404, 412, 420, 428, 445, 456, 467, 472, 476, 481, 492, 497, 501, 515, 526, 540, 551, 554, 559, 581, 589, 592, 597, 610, 613, 622, 630] \ No newline at end of file +[4, 1, 128, 635, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 
2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 3, 21, 344, 8, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 350, 8, 22, 10, 22, 12, 22, 353, 9, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 363, 8, 24, 10, 24, 12, 24, 366, 9, 24, 1, 24, 3, 24, 369, 8, 24, 1, 24, 1, 24, 3, 24, 373, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 380, 8, 26, 1, 26, 1, 26, 3, 26, 384, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 389, 8, 27, 10, 27, 12, 27, 392, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 397, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 402, 8, 29, 10, 29, 12, 29, 405, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 410, 8, 30, 10, 30, 12, 30, 413, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 418, 8, 31, 10, 31, 12, 31, 421, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 428, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 443, 8, 34, 10, 34, 12, 34, 446, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 454, 8, 34, 10, 34, 12, 34, 457, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 465, 8, 34, 10, 34, 12, 34, 468, 9, 34, 1, 34, 1, 34, 3, 34, 472, 8, 34, 1, 35, 1, 35, 3, 35, 476, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 481, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 490, 8, 38, 10, 38, 12, 38, 493, 9, 38, 1, 39, 1, 39, 3, 39, 497, 8, 39, 1, 39, 1, 39, 3, 39, 501, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 513, 8, 42, 10, 42, 12, 42, 516, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 526, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 538, 8, 47, 10, 47, 12, 47, 541, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 551, 8, 50, 1, 51, 3, 51, 554, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 559, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 
1, 56, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 581, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 587, 8, 58, 10, 58, 12, 58, 590, 9, 58, 3, 58, 592, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 597, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 610, 8, 61, 1, 62, 3, 62, 613, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 622, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 628, 8, 64, 10, 64, 12, 64, 631, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 660, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 343, 1, 0, 0, 0, 44, 345, 1, 0, 0, 0, 46, 354, 1, 0, 0, 0, 48, 358, 1, 0, 0, 0, 50, 374, 1, 0, 0, 0, 52, 377, 1, 0, 0, 0, 54, 385, 1, 0, 0, 0, 56, 393, 1, 0, 0, 0, 58, 398, 1, 0, 0, 0, 60, 406, 1, 0, 0, 0, 62, 414, 1, 0, 0, 0, 64, 422, 1, 0, 0, 0, 66, 427, 1, 0, 0, 0, 68, 471, 1, 0, 0, 0, 70, 475, 1, 0, 0, 0, 72, 480, 1, 0, 0, 0, 74, 482, 1, 0, 0, 0, 76, 485, 1, 0, 0, 0, 78, 494, 1, 0, 0, 0, 80, 502, 1, 0, 0, 0, 82, 505, 1, 0, 0, 0, 84, 508, 1, 0, 0, 0, 86, 517, 1, 0, 0, 0, 88, 521, 1, 0, 0, 0, 90, 527, 1, 0, 0, 0, 92, 531, 1, 0, 0, 0, 94, 534, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 98, 546, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 553, 1, 0, 0, 0, 104, 558, 1, 0, 0, 0, 106, 562, 1, 0, 0, 0, 108, 564, 1, 0, 0, 0, 110, 566, 1, 0, 0, 0, 112, 569, 1, 0, 0, 0, 114, 573, 1, 0, 0, 0, 116, 576, 1, 0, 0, 0, 118, 596, 1, 0, 0, 0, 120, 600, 1, 0, 0, 0, 122, 605, 1, 0, 0, 0, 124, 612, 1, 0, 0, 0, 126, 618, 1, 0, 0, 0, 128, 623, 1, 0, 0, 0, 130, 632, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 34, 17, 0, 148, 153, 3, 28, 14, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 
172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 236, 3, 58, 29, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 68, 34, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 108, 54, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 68, 34, 0, 265, 272, 3, 58, 29, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 72, 36, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 64, 32, 0, 299, 
27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 58, 29, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 344, 3, 44, 22, 0, 342, 344, 3, 46, 23, 0, 343, 341, 1, 0, 0, 0, 343, 342, 1, 0, 0, 0, 344, 43, 1, 0, 0, 0, 345, 346, 5, 80, 0, 0, 346, 351, 5, 81, 0, 0, 347, 348, 5, 39, 0, 0, 348, 350, 5, 81, 0, 0, 349, 347, 1, 0, 0, 0, 350, 353, 1, 0, 0, 0, 351, 349, 1, 0, 0, 0, 351, 352, 1, 0, 0, 0, 352, 45, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 354, 355, 5, 70, 0, 0, 355, 356, 3, 44, 22, 0, 356, 357, 5, 71, 0, 0, 357, 47, 1, 0, 0, 0, 358, 359, 5, 19, 0, 0, 359, 364, 3, 36, 18, 0, 360, 361, 5, 39, 0, 0, 361, 363, 3, 36, 18, 0, 362, 360, 1, 0, 0, 0, 363, 366, 1, 0, 0, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 368, 1, 0, 0, 0, 366, 364, 1, 0, 0, 0, 367, 369, 3, 54, 27, 0, 368, 367, 1, 0, 0, 0, 368, 369, 1, 0, 0, 0, 369, 372, 1, 0, 0, 0, 370, 371, 5, 33, 0, 0, 371, 373, 3, 30, 15, 0, 372, 370, 1, 0, 0, 0, 372, 373, 1, 0, 0, 0, 373, 49, 1, 0, 0, 0, 374, 375, 5, 4, 0, 0, 375, 376, 3, 30, 15, 0, 376, 51, 1, 0, 0, 0, 377, 379, 5, 15, 0, 0, 378, 380, 3, 54, 27, 0, 379, 378, 1, 0, 0, 0, 379, 380, 1, 0, 0, 0, 380, 383, 1, 0, 0, 0, 381, 382, 5, 33, 0, 0, 382, 384, 3, 30, 15, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 53, 1, 0, 0, 0, 385, 390, 3, 56, 28, 0, 386, 387, 5, 39, 0, 0, 387, 389, 3, 56, 28, 0, 388, 386, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 388, 1, 0, 0, 0, 390, 391, 1, 0, 0, 0, 391, 55, 1, 0, 0, 0, 392, 390, 1, 0, 0, 0, 393, 396, 3, 32, 16, 0, 394, 395, 5, 16, 0, 0, 395, 397, 3, 10, 5, 0, 396, 394, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 57, 1, 0, 0, 0, 398, 403, 3, 72, 36, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 72, 36, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 59, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 66, 33, 0, 407, 408, 5, 41, 0, 0, 408, 410, 3, 66, 33, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 61, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 419, 3, 60, 30, 0, 415, 416, 5, 39, 0, 0, 416, 418, 3, 60, 30, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 423, 7, 3, 0, 0, 423, 65, 1, 0, 0, 0, 424, 428, 5, 85, 0, 0, 425, 426, 4, 33, 10, 0, 426, 428, 3, 70, 35, 0, 427, 424, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 428, 67, 1, 0, 0, 0, 429, 472, 5, 50, 0, 0, 430, 431, 3, 104, 52, 0, 431, 432, 5, 72, 0, 0, 432, 472, 1, 0, 0, 0, 433, 472, 3, 102, 51, 0, 434, 472, 3, 104, 52, 0, 435, 472, 3, 98, 49, 0, 436, 472, 3, 70, 35, 0, 437, 472, 3, 106, 53, 0, 438, 439, 5, 70, 0, 0, 
439, 444, 3, 100, 50, 0, 440, 441, 5, 39, 0, 0, 441, 443, 3, 100, 50, 0, 442, 440, 1, 0, 0, 0, 443, 446, 1, 0, 0, 0, 444, 442, 1, 0, 0, 0, 444, 445, 1, 0, 0, 0, 445, 447, 1, 0, 0, 0, 446, 444, 1, 0, 0, 0, 447, 448, 5, 71, 0, 0, 448, 472, 1, 0, 0, 0, 449, 450, 5, 70, 0, 0, 450, 455, 3, 98, 49, 0, 451, 452, 5, 39, 0, 0, 452, 454, 3, 98, 49, 0, 453, 451, 1, 0, 0, 0, 454, 457, 1, 0, 0, 0, 455, 453, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 458, 1, 0, 0, 0, 457, 455, 1, 0, 0, 0, 458, 459, 5, 71, 0, 0, 459, 472, 1, 0, 0, 0, 460, 461, 5, 70, 0, 0, 461, 466, 3, 106, 53, 0, 462, 463, 5, 39, 0, 0, 463, 465, 3, 106, 53, 0, 464, 462, 1, 0, 0, 0, 465, 468, 1, 0, 0, 0, 466, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 466, 1, 0, 0, 0, 469, 470, 5, 71, 0, 0, 470, 472, 1, 0, 0, 0, 471, 429, 1, 0, 0, 0, 471, 430, 1, 0, 0, 0, 471, 433, 1, 0, 0, 0, 471, 434, 1, 0, 0, 0, 471, 435, 1, 0, 0, 0, 471, 436, 1, 0, 0, 0, 471, 437, 1, 0, 0, 0, 471, 438, 1, 0, 0, 0, 471, 449, 1, 0, 0, 0, 471, 460, 1, 0, 0, 0, 472, 69, 1, 0, 0, 0, 473, 476, 5, 53, 0, 0, 474, 476, 5, 69, 0, 0, 475, 473, 1, 0, 0, 0, 475, 474, 1, 0, 0, 0, 476, 71, 1, 0, 0, 0, 477, 481, 3, 64, 32, 0, 478, 479, 4, 36, 11, 0, 479, 481, 3, 70, 35, 0, 480, 477, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 73, 1, 0, 0, 0, 482, 483, 5, 9, 0, 0, 483, 484, 5, 31, 0, 0, 484, 75, 1, 0, 0, 0, 485, 486, 5, 14, 0, 0, 486, 491, 3, 78, 39, 0, 487, 488, 5, 39, 0, 0, 488, 490, 3, 78, 39, 0, 489, 487, 1, 0, 0, 0, 490, 493, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 77, 1, 0, 0, 0, 493, 491, 1, 0, 0, 0, 494, 496, 3, 10, 5, 0, 495, 497, 7, 4, 0, 0, 496, 495, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 500, 1, 0, 0, 0, 498, 499, 5, 51, 0, 0, 499, 501, 7, 5, 0, 0, 500, 498, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 79, 1, 0, 0, 0, 502, 503, 5, 8, 0, 0, 503, 504, 3, 62, 31, 0, 504, 81, 1, 0, 0, 0, 505, 506, 5, 2, 0, 0, 506, 507, 3, 62, 31, 0, 507, 83, 1, 0, 0, 0, 508, 509, 5, 11, 0, 0, 509, 514, 3, 86, 43, 0, 510, 511, 5, 39, 0, 0, 511, 513, 3, 86, 43, 0, 512, 510, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 512, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 85, 1, 0, 0, 0, 516, 514, 1, 0, 0, 0, 517, 518, 3, 60, 30, 0, 518, 519, 5, 89, 0, 0, 519, 520, 3, 60, 30, 0, 520, 87, 1, 0, 0, 0, 521, 522, 5, 1, 0, 0, 522, 523, 3, 20, 10, 0, 523, 525, 3, 106, 53, 0, 524, 526, 3, 94, 47, 0, 525, 524, 1, 0, 0, 0, 525, 526, 1, 0, 0, 0, 526, 89, 1, 0, 0, 0, 527, 528, 5, 7, 0, 0, 528, 529, 3, 20, 10, 0, 529, 530, 3, 106, 53, 0, 530, 91, 1, 0, 0, 0, 531, 532, 5, 10, 0, 0, 532, 533, 3, 58, 29, 0, 533, 93, 1, 0, 0, 0, 534, 539, 3, 96, 48, 0, 535, 536, 5, 39, 0, 0, 536, 538, 3, 96, 48, 0, 537, 535, 1, 0, 0, 0, 538, 541, 1, 0, 0, 0, 539, 537, 1, 0, 0, 0, 539, 540, 1, 0, 0, 0, 540, 95, 1, 0, 0, 0, 541, 539, 1, 0, 0, 0, 542, 543, 3, 64, 32, 0, 543, 544, 5, 36, 0, 0, 544, 545, 3, 68, 34, 0, 545, 97, 1, 0, 0, 0, 546, 547, 7, 6, 0, 0, 547, 99, 1, 0, 0, 0, 548, 551, 3, 102, 51, 0, 549, 551, 3, 104, 52, 0, 550, 548, 1, 0, 0, 0, 550, 549, 1, 0, 0, 0, 551, 101, 1, 0, 0, 0, 552, 554, 7, 0, 0, 0, 553, 552, 1, 0, 0, 0, 553, 554, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0, 555, 556, 5, 32, 0, 0, 556, 103, 1, 0, 0, 0, 557, 559, 7, 0, 0, 0, 558, 557, 1, 0, 0, 0, 558, 559, 1, 0, 0, 0, 559, 560, 1, 0, 0, 0, 560, 561, 5, 31, 0, 0, 561, 105, 1, 0, 0, 0, 562, 563, 5, 30, 0, 0, 563, 107, 1, 0, 0, 0, 564, 565, 7, 7, 0, 0, 565, 109, 1, 0, 0, 0, 566, 567, 5, 5, 0, 0, 567, 568, 3, 112, 56, 0, 568, 111, 1, 0, 0, 0, 569, 570, 5, 70, 0, 0, 570, 571, 3, 2, 1, 0, 571, 572, 5, 71, 0, 0, 572, 113, 1, 0, 0, 0, 573, 574, 5, 13, 0, 
0, 574, 575, 5, 105, 0, 0, 575, 115, 1, 0, 0, 0, 576, 577, 5, 3, 0, 0, 577, 580, 5, 95, 0, 0, 578, 579, 5, 93, 0, 0, 579, 581, 3, 60, 30, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 591, 1, 0, 0, 0, 582, 583, 5, 94, 0, 0, 583, 588, 3, 118, 59, 0, 584, 585, 5, 39, 0, 0, 585, 587, 3, 118, 59, 0, 586, 584, 1, 0, 0, 0, 587, 590, 1, 0, 0, 0, 588, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 592, 1, 0, 0, 0, 590, 588, 1, 0, 0, 0, 591, 582, 1, 0, 0, 0, 591, 592, 1, 0, 0, 0, 592, 117, 1, 0, 0, 0, 593, 594, 3, 60, 30, 0, 594, 595, 5, 36, 0, 0, 595, 597, 1, 0, 0, 0, 596, 593, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 599, 3, 60, 30, 0, 599, 119, 1, 0, 0, 0, 600, 601, 5, 18, 0, 0, 601, 602, 3, 36, 18, 0, 602, 603, 5, 93, 0, 0, 603, 604, 3, 62, 31, 0, 604, 121, 1, 0, 0, 0, 605, 606, 5, 17, 0, 0, 606, 609, 3, 54, 27, 0, 607, 608, 5, 33, 0, 0, 608, 610, 3, 30, 15, 0, 609, 607, 1, 0, 0, 0, 609, 610, 1, 0, 0, 0, 610, 123, 1, 0, 0, 0, 611, 613, 7, 8, 0, 0, 612, 611, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 5, 20, 0, 0, 615, 616, 3, 126, 63, 0, 616, 617, 3, 128, 64, 0, 617, 125, 1, 0, 0, 0, 618, 621, 3, 64, 32, 0, 619, 620, 5, 89, 0, 0, 620, 622, 3, 64, 32, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 127, 1, 0, 0, 0, 623, 624, 5, 93, 0, 0, 624, 629, 3, 130, 65, 0, 625, 626, 5, 39, 0, 0, 626, 628, 3, 130, 65, 0, 627, 625, 1, 0, 0, 0, 628, 631, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 129, 1, 0, 0, 0, 631, 629, 1, 0, 0, 0, 632, 633, 3, 16, 8, 0, 633, 131, 1, 0, 0, 0, 61, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 343, 351, 364, 368, 372, 379, 383, 390, 396, 403, 411, 419, 427, 444, 455, 466, 471, 475, 480, 491, 496, 500, 514, 525, 539, 550, 553, 558, 580, 588, 591, 596, 609, 612, 621, 629] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index 3bf6795c4e1dc..e864eaff3edd7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -29,9 +29,9 @@ public class EsqlBaseParser extends ParserConfig { LIMIT=9, MV_EXPAND=10, RENAME=11, ROW=12, SHOW=13, SORT=14, STATS=15, WHERE=16, DEV_INLINESTATS=17, DEV_LOOKUP=18, DEV_METRICS=19, DEV_JOIN=20, DEV_JOIN_FULL=21, DEV_JOIN_LEFT=22, DEV_JOIN_RIGHT=23, DEV_JOIN_LOOKUP=24, - UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, COLON=29, - PIPE=30, QUOTED_STRING=31, INTEGER_LITERAL=32, DECIMAL_LITERAL=33, BY=34, - AND=35, ASC=36, ASSIGN=37, CAST_OP=38, COMMA=39, DESC=40, DOT=41, FALSE=42, + UNKNOWN_CMD=25, LINE_COMMENT=26, MULTILINE_COMMENT=27, WS=28, PIPE=29, + QUOTED_STRING=30, INTEGER_LITERAL=31, DECIMAL_LITERAL=32, BY=33, AND=34, + ASC=35, ASSIGN=36, CAST_OP=37, COLON=38, COMMA=39, DESC=40, DOT=41, FALSE=42, FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, @@ -99,8 +99,8 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'keep'", "'limit'", "'mv_expand'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, null, null, null, 
null, null, null, - null, null, null, null, null, "':'", "'|'", null, null, null, "'by'", - "'and'", "'asc'", "'='", "'::'", "','", "'desc'", "'.'", "'false'", "'first'", + null, null, null, null, null, "'|'", null, null, null, "'by'", "'and'", + "'asc'", "'='", "'::'", "':'", "','", "'desc'", "'.'", "'false'", "'first'", "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, @@ -118,28 +118,28 @@ private static String[] makeSymbolicNames() { "KEEP", "LIMIT", "MV_EXPAND", "RENAME", "ROW", "SHOW", "SORT", "STATS", "WHERE", "DEV_INLINESTATS", "DEV_LOOKUP", "DEV_METRICS", "DEV_JOIN", "DEV_JOIN_FULL", "DEV_JOIN_LEFT", "DEV_JOIN_RIGHT", "DEV_JOIN_LOOKUP", - "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "COLON", "PIPE", - "QUOTED_STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", - "ASSIGN", "CAST_OP", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", - "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", - "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", - "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", - "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", - "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", - "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", - "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", - "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", - "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", - "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", - "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", - "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", - "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", - "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", - "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", - "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", - "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" + "UNKNOWN_CMD", "LINE_COMMENT", "MULTILINE_COMMENT", "WS", "PIPE", "QUOTED_STRING", + "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "CAST_OP", + "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", + "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", + "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", + "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", + "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", + 
"ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", + "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", + "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", + "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", + "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", + "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", + "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", + "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -907,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(207); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -1012,14 +1012,12 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; setState(205); - if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(206); matchBooleanExpression(); } break; } _ctx.stop = _input.LT(-1); - setState(217); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1027,7 +1025,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(215); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1035,11 +1033,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(209); + setState(208); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(210); + setState(209); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(211); + setState(210); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1048,18 +1046,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(212); + setState(211); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(213); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(214); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(219); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1114,48 +1112,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(234); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(220); + setState(219); valueExpression(); - setState(222); + setState(221); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(221); + setState(220); match(NOT); } } - setState(224); + setState(223); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(225); + setState(224); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(227); + setState(226); valueExpression(); - setState(229); + setState(228); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(228); + setState(227); match(NOT); } } - setState(231); + setState(230); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(232); + setState(231); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1209,11 +1207,11 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(236); + setState(235); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(237); + setState(236); match(COLON); - setState(238); + setState(237); ((MatchBooleanExpressionContext)_localctx).queryString = constant(); } } @@ -1297,14 +1295,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, RULE_valueExpression); try { - setState(245); + setState(244); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(240); + setState(239); operatorExpression(0); } break; @@ -1312,11 +1310,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(241); + setState(240); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(242); + setState(241); comparisonOperator(); - setState(243); + setState(242); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1441,7 +1439,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(251); + setState(250); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1450,7 +1448,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(248); + setState(247); primaryExpression(0); } break; @@ -1459,7 +1457,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(249); + setState(248); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1470,13 +1468,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(250); + setState(249); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(261); + setState(260); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1484,7 +1482,7 @@ private OperatorExpressionContext 
operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(259); + setState(258); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1492,9 +1490,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(253); + setState(252); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(254); + setState(253); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1505,7 +1503,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(255); + setState(254); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1514,9 +1512,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(256); + setState(255); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(257); + setState(256); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1527,14 +1525,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(258); + setState(257); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(263); + setState(262); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1692,7 +1690,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(272); + setState(271); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: @@ -1701,7 +1699,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(265); + setState(264); constant(); } break; @@ -1710,7 +1708,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(266); + setState(265); qualifiedName(); } break; @@ -1719,7 +1717,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(267); + setState(266); functionExpression(); } break; @@ -1728,17 +1726,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(268); + setState(267); match(LP); - setState(269); + setState(268); booleanExpression(0); - setState(270); + setState(269); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(279); + setState(278); 
_errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1749,16 +1747,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(274); + setState(273); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(275); + setState(274); match(CAST_OP); - setState(276); + setState(275); dataType(); } } } - setState(281); + setState(280); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,17,_ctx); } @@ -1820,37 +1818,37 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(282); + setState(281); functionName(); - setState(283); + setState(282); match(LP); - setState(293); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,19,_ctx) ) { case 1: { - setState(284); + setState(283); match(ASTERISK); } break; case 2: { { - setState(285); + setState(284); booleanExpression(0); - setState(290); + setState(289); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(286); + setState(285); match(COMMA); - setState(287); + setState(286); booleanExpression(0); } } - setState(292); + setState(291); _errHandler.sync(this); _la = _input.LA(1); } @@ -1858,7 +1856,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } break; } - setState(295); + setState(294); match(RP); } } @@ -1904,7 +1902,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(297); + setState(296); identifierOrParameter(); } } @@ -1962,7 +1960,7 @@ public final DataTypeContext dataType() throws RecognitionException { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(299); + setState(298); identifier(); } } @@ -2009,9 +2007,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(301); + setState(300); match(ROW); - setState(302); + setState(301); fields(); } } @@ -2065,23 +2063,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(304); + setState(303); field(); - setState(309); + setState(308); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(305); + setState(304); match(COMMA); - setState(306); + setState(305); field(); } } } - setState(311); + setState(310); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,20,_ctx); } @@ -2133,19 +2131,19 @@ public final FieldContext field() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(315); + setState(314); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(312); + setState(311); qualifiedName(); - setState(313); + setState(312); match(ASSIGN); } break; } - setState(317); + setState(316); booleanExpression(0); } } @@ -2203,34 +2201,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(319); + 
setState(318); match(FROM); - setState(320); + setState(319); indexPattern(); - setState(325); + setState(324); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(321); + setState(320); match(COMMA); - setState(322); + setState(321); indexPattern(); } } } - setState(327); + setState(326); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,22,_ctx); } - setState(329); + setState(328); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,23,_ctx) ) { case 1: { - setState(328); + setState(327); metadata(); } break; @@ -2283,19 +2281,19 @@ public final IndexPatternContext indexPattern() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(334); + setState(333); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(331); + setState(330); clusterString(); - setState(332); + setState(331); match(COLON); } break; } - setState(336); + setState(335); indexString(); } } @@ -2339,7 +2337,7 @@ public final ClusterStringContext clusterString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(338); + setState(337); match(UNQUOTED_SOURCE); } } @@ -2385,7 +2383,7 @@ public final IndexStringContext indexString() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(340); + setState(339); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2440,20 +2438,20 @@ public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); enterRule(_localctx, 42, RULE_metadata); try { - setState(344); + setState(343); _errHandler.sync(this); switch (_input.LA(1)) { case METADATA: enterOuterAlt(_localctx, 1); { - setState(342); + setState(341); metadataOption(); } break; case OPENING_BRACKET: enterOuterAlt(_localctx, 2); { - setState(343); + setState(342); deprecated_metadata(); } break; @@ -2510,25 +2508,25 @@ public final MetadataOptionContext metadataOption() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(346); + setState(345); match(METADATA); - setState(347); + setState(346); match(UNQUOTED_SOURCE); - setState(352); + setState(351); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(348); + setState(347); match(COMMA); - setState(349); + setState(348); match(UNQUOTED_SOURCE); } } } - setState(354); + setState(353); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,26,_ctx); } @@ -2577,11 +2575,11 @@ public final Deprecated_metadataContext deprecated_metadata() throws Recognition try { enterOuterAlt(_localctx, 1); { - setState(355); + setState(354); match(OPENING_BRACKET); - setState(356); + setState(355); metadataOption(); - setState(357); + setState(356); match(CLOSING_BRACKET); } } @@ -2645,46 +2643,46 @@ public final MetricsCommandContext metricsCommand() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(359); + setState(358); match(DEV_METRICS); - setState(360); + setState(359); indexPattern(); - setState(365); + setState(364); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(361); + setState(360); match(COMMA); - setState(362); + setState(361); indexPattern(); } } } - setState(367); + setState(366); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } - setState(369); + setState(368); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { case 1: { - setState(368); + setState(367); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(373); + setState(372); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { case 1: { - setState(371); + setState(370); match(BY); - setState(372); + setState(371); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2734,9 +2732,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(375); + setState(374); match(EVAL); - setState(376); + setState(375); fields(); } } @@ -2789,26 +2787,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(378); + setState(377); match(STATS); - setState(380); + setState(379); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(379); + setState(378); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(384); + setState(383); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(382); + setState(381); match(BY); - setState(383); + setState(382); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2865,23 +2863,23 @@ public final AggFieldsContext aggFields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(386); + setState(385); aggField(); - setState(391); + setState(390); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(387); + setState(386); match(COMMA); - setState(388); + setState(387); aggField(); } } } - setState(393); + setState(392); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,32,_ctx); } @@ -2933,16 +2931,16 @@ public final AggFieldContext aggField() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(394); + setState(393); field(); - setState(397); + setState(396); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(395); + setState(394); match(WHERE); - setState(396); + setState(395); booleanExpression(0); } break; @@ -2999,23 +2997,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(399); + setState(398); identifierOrParameter(); - setState(404); + setState(403); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(400); + setState(399); match(DOT); - setState(401); + setState(400); identifierOrParameter(); } } } - setState(406); + setState(405); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } @@ -3071,23 +3069,23 @@ public final QualifiedNamePatternContext qualifiedNamePattern() throws Recogniti int _alt; enterOuterAlt(_localctx, 1); { - 
setState(407); + setState(406); identifierPattern(); - setState(412); + setState(411); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(408); + setState(407); match(DOT); - setState(409); + setState(408); identifierPattern(); } } } - setState(414); + setState(413); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,35,_ctx); } @@ -3143,23 +3141,23 @@ public final QualifiedNamePatternsContext qualifiedNamePatterns() throws Recogni int _alt; enterOuterAlt(_localctx, 1); { - setState(415); + setState(414); qualifiedNamePattern(); - setState(420); + setState(419); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(416); + setState(415); match(COMMA); - setState(417); + setState(416); qualifiedNamePattern(); } } } - setState(422); + setState(421); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } @@ -3207,7 +3205,7 @@ public final IdentifierContext identifier() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(423); + setState(422); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3260,22 +3258,22 @@ public final IdentifierPatternContext identifierPattern() throws RecognitionExce IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(428); + setState(427); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(425); + setState(424); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(426); + setState(425); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(427); + setState(426); parameter(); } break; @@ -3548,14 +3546,14 @@ public final ConstantContext constant() throws RecognitionException { enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(472); + setState(471); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(430); + setState(429); match(NULL); } break; @@ -3563,9 +3561,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(431); + setState(430); integerValue(); - setState(432); + setState(431); match(UNQUOTED_IDENTIFIER); } break; @@ -3573,7 +3571,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(434); + setState(433); decimalValue(); } break; @@ -3581,7 +3579,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(435); + setState(434); integerValue(); } break; @@ -3589,7 +3587,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(436); + setState(435); booleanValue(); } break; @@ -3597,7 +3595,7 @@ public final 
ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(437); + setState(436); parameter(); } break; @@ -3605,7 +3603,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(438); + setState(437); string(); } break; @@ -3613,27 +3611,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(439); + setState(438); match(OPENING_BRACKET); - setState(440); + setState(439); numericValue(); - setState(445); + setState(444); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(441); + setState(440); match(COMMA); - setState(442); + setState(441); numericValue(); } } - setState(447); + setState(446); _errHandler.sync(this); _la = _input.LA(1); } - setState(448); + setState(447); match(CLOSING_BRACKET); } break; @@ -3641,27 +3639,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(450); + setState(449); match(OPENING_BRACKET); - setState(451); + setState(450); booleanValue(); - setState(456); + setState(455); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(452); + setState(451); match(COMMA); - setState(453); + setState(452); booleanValue(); } } - setState(458); + setState(457); _errHandler.sync(this); _la = _input.LA(1); } - setState(459); + setState(458); match(CLOSING_BRACKET); } break; @@ -3669,27 +3667,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(461); + setState(460); match(OPENING_BRACKET); - setState(462); + setState(461); string(); - setState(467); + setState(466); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(463); + setState(462); match(COMMA); - setState(464); + setState(463); string(); } } - setState(469); + setState(468); _errHandler.sync(this); _la = _input.LA(1); } - setState(470); + setState(469); match(CLOSING_BRACKET); } break; @@ -3763,14 +3761,14 @@ public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); enterRule(_localctx, 70, RULE_parameter); try { - setState(476); + setState(475); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(474); + setState(473); match(PARAM); } break; @@ -3778,7 +3776,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(475); + setState(474); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3829,22 +3827,22 @@ public final IdentifierOrParameterContext identifierOrParameter() throws Recogni IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(481); + setState(480); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(478); + setState(477); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - 
setState(479); + setState(478); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(480); + setState(479); parameter(); } break; @@ -3891,9 +3889,9 @@ public final LimitCommandContext limitCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(483); + setState(482); match(LIMIT); - setState(484); + setState(483); match(INTEGER_LITERAL); } } @@ -3948,25 +3946,25 @@ public final SortCommandContext sortCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(486); + setState(485); match(SORT); - setState(487); + setState(486); orderExpression(); - setState(492); + setState(491); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(488); + setState(487); match(COMMA); - setState(489); + setState(488); orderExpression(); } } } - setState(494); + setState(493); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,44,_ctx); } @@ -4022,14 +4020,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(495); + setState(494); booleanExpression(0); - setState(497); + setState(496); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: { - setState(496); + setState(495); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -4043,14 +4041,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(501); + setState(500); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { case 1: { - setState(499); + setState(498); match(NULLS); - setState(500); + setState(499); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4109,9 +4107,9 @@ public final KeepCommandContext keepCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(503); + setState(502); match(KEEP); - setState(504); + setState(503); qualifiedNamePatterns(); } } @@ -4158,9 +4156,9 @@ public final DropCommandContext dropCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(506); + setState(505); match(DROP); - setState(507); + setState(506); qualifiedNamePatterns(); } } @@ -4215,25 +4213,25 @@ public final RenameCommandContext renameCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(509); + setState(508); match(RENAME); - setState(510); + setState(509); renameClause(); - setState(515); + setState(514); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(511); + setState(510); match(COMMA); - setState(512); + setState(511); renameClause(); } } } - setState(517); + setState(516); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,47,_ctx); } @@ -4287,11 +4285,11 @@ public final RenameClauseContext renameClause() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(518); + setState(517); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(519); + setState(518); match(AS); - setState(520); + setState(519); 
((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4344,18 +4342,18 @@ public final DissectCommandContext dissectCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(522); + setState(521); match(DISSECT); - setState(523); + setState(522); primaryExpression(0); - setState(524); + setState(523); string(); - setState(526); + setState(525); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(525); + setState(524); commandOptions(); } break; @@ -4408,11 +4406,11 @@ public final GrokCommandContext grokCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(528); + setState(527); match(GROK); - setState(529); + setState(528); primaryExpression(0); - setState(530); + setState(529); string(); } } @@ -4459,9 +4457,9 @@ public final MvExpandCommandContext mvExpandCommand() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(532); + setState(531); match(MV_EXPAND); - setState(533); + setState(532); qualifiedName(); } } @@ -4515,23 +4513,23 @@ public final CommandOptionsContext commandOptions() throws RecognitionException int _alt; enterOuterAlt(_localctx, 1); { - setState(535); + setState(534); commandOption(); - setState(540); + setState(539); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(536); + setState(535); match(COMMA); - setState(537); + setState(536); commandOption(); } } } - setState(542); + setState(541); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } @@ -4583,11 +4581,11 @@ public final CommandOptionContext commandOption() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(543); + setState(542); identifier(); - setState(544); + setState(543); match(ASSIGN); - setState(545); + setState(544); constant(); } } @@ -4633,7 +4631,7 @@ public final BooleanValueContext booleanValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(547); + setState(546); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4688,20 +4686,20 @@ public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); enterRule(_localctx, 100, RULE_numericValue); try { - setState(551); + setState(550); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(549); + setState(548); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(550); + setState(549); integerValue(); } break; @@ -4750,12 +4748,12 @@ public final DecimalValueContext decimalValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(554); + setState(553); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(553); + setState(552); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4768,7 +4766,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(556); + setState(555); match(DECIMAL_LITERAL); } } @@ -4815,12 +4813,12 @@ public final IntegerValueContext integerValue() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(559); + setState(558); _errHandler.sync(this); _la = 
_input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(558); + setState(557); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4833,7 +4831,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(561); + setState(560); match(INTEGER_LITERAL); } } @@ -4877,7 +4875,7 @@ public final StringContext string() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(563); + setState(562); match(QUOTED_STRING); } } @@ -4927,7 +4925,7 @@ public final ComparisonOperatorContext comparisonOperator() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(565); + setState(564); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -4982,9 +4980,9 @@ public final ExplainCommandContext explainCommand() throws RecognitionException try { enterOuterAlt(_localctx, 1); { - setState(567); + setState(566); match(EXPLAIN); - setState(568); + setState(567); subqueryExpression(); } } @@ -5032,11 +5030,11 @@ public final SubqueryExpressionContext subqueryExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(570); + setState(569); match(OPENING_BRACKET); - setState(571); + setState(570); query(0); - setState(572); + setState(571); match(CLOSING_BRACKET); } } @@ -5093,9 +5091,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(574); + setState(573); match(SHOW); - setState(575); + setState(574); match(INFO); } } @@ -5158,46 +5156,46 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(577); + setState(576); match(ENRICH); - setState(578); + setState(577); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(581); + setState(580); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { case 1: { - setState(579); + setState(578); match(ON); - setState(580); + setState(579); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(592); + setState(591); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(583); + setState(582); match(WITH); - setState(584); + setState(583); enrichWithClause(); - setState(589); + setState(588); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(585); + setState(584); match(COMMA); - setState(586); + setState(585); enrichWithClause(); } } } - setState(591); + setState(590); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,54,_ctx); } @@ -5254,19 +5252,19 @@ public final EnrichWithClauseContext enrichWithClause() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(597); + setState(596); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { case 1: { - setState(594); + setState(593); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(595); + setState(594); match(ASSIGN); } break; } - setState(599); + setState(598); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5319,13 +5317,13 @@ public final LookupCommandContext lookupCommand() throws 
RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(601); + setState(600); match(DEV_LOOKUP); - setState(602); + setState(601); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(603); + setState(602); match(ON); - setState(604); + setState(603); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5378,18 +5376,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(606); + setState(605); match(DEV_INLINESTATS); - setState(607); + setState(606); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - setState(610); + setState(609); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(608); + setState(607); match(BY); - setState(609); + setState(608); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5447,12 +5445,12 @@ public final JoinCommandContext joinCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(613); + setState(612); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { { - setState(612); + setState(611); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { @@ -5466,11 +5464,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException { } } - setState(615); + setState(614); match(DEV_JOIN); - setState(616); + setState(615); joinTarget(); - setState(617); + setState(616); joinCondition(); } } @@ -5523,16 +5521,16 @@ public final JoinTargetContext joinTarget() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(619); + setState(618); ((JoinTargetContext)_localctx).index = identifier(); - setState(622); + setState(621); _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { - setState(620); + setState(619); match(AS); - setState(621); + setState(620); ((JoinTargetContext)_localctx).alias = identifier(); } } @@ -5590,25 +5588,25 @@ public final JoinConditionContext joinCondition() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(624); + setState(623); match(ON); - setState(625); + setState(624); joinPredicate(); - setState(630); + setState(629); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(626); + setState(625); match(COMMA); - setState(627); + setState(626); joinPredicate(); } } } - setState(632); + setState(631); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,60,_ctx); } @@ -5656,7 +5654,7 @@ public final JoinPredicateContext joinPredicate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(633); + setState(632); valueExpression(); } } @@ -5720,47 +5718,45 @@ private boolean processingCommand_sempred(ProcessingCommandContext _localctx, in private boolean booleanExpression_sempred(BooleanExpressionContext _localctx, int predIndex) { switch (predIndex) { case 5: - return this.isDevVersion(); - case 6: return precpred(_ctx, 5); - case 7: + case 6: return precpred(_ctx, 4); } return true; } private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 8: + case 7: return precpred(_ctx, 2); - case 9: + case 8: return precpred(_ctx, 1); } return 
true; } private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, int predIndex) { switch (predIndex) { - case 10: + case 9: return precpred(_ctx, 1); } return true; } private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { switch (predIndex) { - case 11: + case 10: return this.isDevVersion(); } return true; } private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { switch (predIndex) { - case 12: + case 11: return this.isDevVersion(); } return true; } public static final String _serializedATN = - "\u0004\u0001\u0080\u027c\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0080\u027b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5789,377 +5785,377 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00d0\b\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005"+ - "\u0005\u00d8\b\u0005\n\u0005\f\u0005\u00db\t\u0005\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00df\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0006\u0003\u0006\u00e6\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0003\u0006\u00eb\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f6\b\b\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0003\t\u00fc\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0005\t\u0104\b\t\n\t\f\t\u0107\t\t\u0001\n\u0001\n\u0001\n"+ - "\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0111\b\n\u0001\n\u0001"+ - "\n\u0001\n\u0005\n\u0116\b\n\n\n\f\n\u0119\t\n\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0121\b\u000b"+ - "\n\u000b\f\u000b\u0124\t\u000b\u0003\u000b\u0126\b\u000b\u0001\u000b\u0001"+ - "\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0134\b\u000f\n\u000f"+ - "\f\u000f\u0137\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010"+ - "\u013c\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0005\u0011\u0144\b\u0011\n\u0011\f\u0011\u0147\t\u0011\u0001"+ - "\u0011\u0003\u0011\u014a\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003"+ - "\u0012\u014f\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0159\b\u0015\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015f\b\u0016\n"+ - "\u0016\f\u0016\u0162\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016c"+ - "\b\u0018\n\u0018\f\u0018\u016f\t\u0018\u0001\u0018\u0003\u0018\u0172\b"+ - "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0176\b\u0018\u0001\u0019\u0001"+ - "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017d\b\u001a\u0001"+ - "\u001a\u0001\u001a\u0003\u001a\u0181\b\u001a\u0001\u001b\u0001\u001b\u0001"+ - 
"\u001b\u0005\u001b\u0186\b\u001b\n\u001b\f\u001b\u0189\t\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001c\u0003\u001c\u018e\b\u001c\u0001\u001d\u0001\u001d"+ - "\u0001\u001d\u0005\u001d\u0193\b\u001d\n\u001d\f\u001d\u0196\t\u001d\u0001"+ - "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u019b\b\u001e\n\u001e\f\u001e"+ - "\u019e\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a3\b"+ - "\u001f\n\u001f\f\u001f\u01a6\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ - "!\u0003!\u01ad\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ - "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bc\b\"\n"+ - "\"\f\"\u01bf\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01c7\b\"\n\"\f\"\u01ca\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ - "\u0001\"\u0005\"\u01d2\b\"\n\"\f\"\u01d5\t\"\u0001\"\u0001\"\u0003\"\u01d9"+ - "\b\"\u0001#\u0001#\u0003#\u01dd\b#\u0001$\u0001$\u0001$\u0003$\u01e2\b"+ - "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01eb\b&\n&"+ - "\f&\u01ee\t&\u0001\'\u0001\'\u0003\'\u01f2\b\'\u0001\'\u0001\'\u0003\'"+ - "\u01f6\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0005*\u0202\b*\n*\f*\u0205\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ - ",\u0001,\u0001,\u0001,\u0003,\u020f\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ - ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u021b\b/\n/\f/\u021e\t/\u0001"+ - "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0228\b2\u0001"+ - "3\u00033\u022b\b3\u00013\u00013\u00014\u00034\u0230\b4\u00014\u00014\u0001"+ - "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ - "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0246\b:\u0001"+ - ":\u0001:\u0001:\u0001:\u0005:\u024c\b:\n:\f:\u024f\t:\u0003:\u0251\b:"+ - "\u0001;\u0001;\u0001;\u0003;\u0256\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ - "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0263\b=\u0001>\u0003"+ - ">\u0266\b>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026f"+ - "\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0275\b@\n@\f@\u0278\t@\u0001A"+ - "\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006"+ - "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ - "02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000"+ - "@A\u0001\u0000BD\u0002\u0000\u001f\u001fQQ\u0001\u0000HI\u0002\u0000$"+ - "$((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016"+ - "\u0018\u0295\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ - "\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000"+ - "\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00cf\u0001\u0000\u0000"+ - "\u0000\f\u00ea\u0001\u0000\u0000\u0000\u000e\u00ec\u0001\u0000\u0000\u0000"+ - "\u0010\u00f5\u0001\u0000\u0000\u0000\u0012\u00fb\u0001\u0000\u0000\u0000"+ - "\u0014\u0110\u0001\u0000\u0000\u0000\u0016\u011a\u0001\u0000\u0000\u0000"+ - "\u0018\u0129\u0001\u0000\u0000\u0000\u001a\u012b\u0001\u0000\u0000\u0000"+ - "\u001c\u012d\u0001\u0000\u0000\u0000\u001e\u0130\u0001\u0000\u0000\u0000"+ - " \u013b\u0001\u0000\u0000\u0000\"\u013f\u0001\u0000\u0000\u0000$\u014e"+ - "\u0001\u0000\u0000\u0000&\u0152\u0001\u0000\u0000\u0000(\u0154\u0001\u0000"+ - "\u0000\u0000*\u0158\u0001\u0000\u0000\u0000,\u015a\u0001\u0000\u0000\u0000"+ - ".\u0163\u0001\u0000\u0000\u00000\u0167\u0001\u0000\u0000\u00002\u0177"+ - "\u0001\u0000\u0000\u00004\u017a\u0001\u0000\u0000\u00006\u0182\u0001\u0000"+ - 
"\u0000\u00008\u018a\u0001\u0000\u0000\u0000:\u018f\u0001\u0000\u0000\u0000"+ - "<\u0197\u0001\u0000\u0000\u0000>\u019f\u0001\u0000\u0000\u0000@\u01a7"+ - "\u0001\u0000\u0000\u0000B\u01ac\u0001\u0000\u0000\u0000D\u01d8\u0001\u0000"+ - "\u0000\u0000F\u01dc\u0001\u0000\u0000\u0000H\u01e1\u0001\u0000\u0000\u0000"+ - "J\u01e3\u0001\u0000\u0000\u0000L\u01e6\u0001\u0000\u0000\u0000N\u01ef"+ - "\u0001\u0000\u0000\u0000P\u01f7\u0001\u0000\u0000\u0000R\u01fa\u0001\u0000"+ - "\u0000\u0000T\u01fd\u0001\u0000\u0000\u0000V\u0206\u0001\u0000\u0000\u0000"+ - "X\u020a\u0001\u0000\u0000\u0000Z\u0210\u0001\u0000\u0000\u0000\\\u0214"+ - "\u0001\u0000\u0000\u0000^\u0217\u0001\u0000\u0000\u0000`\u021f\u0001\u0000"+ - "\u0000\u0000b\u0223\u0001\u0000\u0000\u0000d\u0227\u0001\u0000\u0000\u0000"+ - "f\u022a\u0001\u0000\u0000\u0000h\u022f\u0001\u0000\u0000\u0000j\u0233"+ - "\u0001\u0000\u0000\u0000l\u0235\u0001\u0000\u0000\u0000n\u0237\u0001\u0000"+ - "\u0000\u0000p\u023a\u0001\u0000\u0000\u0000r\u023e\u0001\u0000\u0000\u0000"+ - "t\u0241\u0001\u0000\u0000\u0000v\u0255\u0001\u0000\u0000\u0000x\u0259"+ - "\u0001\u0000\u0000\u0000z\u025e\u0001\u0000\u0000\u0000|\u0265\u0001\u0000"+ - "\u0000\u0000~\u026b\u0001\u0000\u0000\u0000\u0080\u0270\u0001\u0000\u0000"+ - "\u0000\u0082\u0279\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001"+ - "\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000"+ - "\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004"+ - "\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000"+ - "\u0000\u008b\u008c\u0005\u001e\u0000\u0000\u008c\u008e\u0003\u0006\u0003"+ - "\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000"+ - "\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000"+ - "\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ - "\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094"+ - "\u0099\u0003\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004"+ - "\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000"+ - "\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000"+ - "\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000"+ - "\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019"+ - "\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d"+ - "\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003"+ - "L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad"+ - "\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5"+ - "\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad"+ - "\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x"+ - "<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000"+ - "\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000"+ - "\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000"+ - "\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000"+ - "\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000"+ - "\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000"+ - 
"\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0"+ - "\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2"+ - "\u00b3\u00051\u0000\u0000\u00b3\u00d0\u0003\n\u0005\b\u00b4\u00d0\u0003"+ - "\u0010\b\u0000\u00b5\u00d0\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010"+ - "\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000"+ - "\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ - "\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc"+ - "\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0"+ - "\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001"+ - "\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001"+ - "\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+ - "\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00d0\u0001\u0000"+ - "\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000"+ - "\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000"+ - "\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000"+ - "\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00d0\u0001\u0000\u0000\u0000\u00cd"+ - "\u00ce\u0004\u0005\u0005\u0000\u00ce\u00d0\u0003\u000e\u0007\u0000\u00cf"+ - "\u00b1\u0001\u0000\u0000\u0000\u00cf\u00b4\u0001\u0000\u0000\u0000\u00cf"+ - "\u00b5\u0001\u0000\u0000\u0000\u00cf\u00b6\u0001\u0000\u0000\u0000\u00cf"+ - "\u00c6\u0001\u0000\u0000\u0000\u00cf\u00cd\u0001\u0000\u0000\u0000\u00d0"+ - "\u00d9\u0001\u0000\u0000\u0000\u00d1\u00d2\n\u0005\u0000\u0000\u00d2\u00d3"+ - "\u0005#\u0000\u0000\u00d3\u00d8\u0003\n\u0005\u0006\u00d4\u00d5\n\u0004"+ - "\u0000\u0000\u00d5\u00d6\u00054\u0000\u0000\u00d6\u00d8\u0003\n\u0005"+ - "\u0005\u00d7\u00d1\u0001\u0000\u0000\u0000\u00d7\u00d4\u0001\u0000\u0000"+ - "\u0000\u00d8\u00db\u0001\u0000\u0000\u0000\u00d9\u00d7\u0001\u0000\u0000"+ - "\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da\u000b\u0001\u0000\u0000"+ - "\u0000\u00db\u00d9\u0001\u0000\u0000\u0000\u00dc\u00de\u0003\u0010\b\u0000"+ - "\u00dd\u00df\u00051\u0000\u0000\u00de\u00dd\u0001\u0000\u0000\u0000\u00de"+ - "\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0001\u0000\u0000\u0000\u00e0"+ - "\u00e1\u0005/\u0000\u0000\u00e1\u00e2\u0003j5\u0000\u00e2\u00eb\u0001"+ - "\u0000\u0000\u0000\u00e3\u00e5\u0003\u0010\b\u0000\u00e4\u00e6\u00051"+ - "\u0000\u0000\u00e5\u00e4\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000"+ - "\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00e8\u00056\u0000"+ - "\u0000\u00e8\u00e9\u0003j5\u0000\u00e9\u00eb\u0001\u0000\u0000\u0000\u00ea"+ - "\u00dc\u0001\u0000\u0000\u0000\u00ea\u00e3\u0001\u0000\u0000\u0000\u00eb"+ - "\r\u0001\u0000\u0000\u0000\u00ec\u00ed\u0003:\u001d\u0000\u00ed\u00ee"+ - "\u0005\u001d\u0000\u0000\u00ee\u00ef\u0003D\"\u0000\u00ef\u000f\u0001"+ - "\u0000\u0000\u0000\u00f0\u00f6\u0003\u0012\t\u0000\u00f1\u00f2\u0003\u0012"+ - "\t\u0000\u00f2\u00f3\u0003l6\u0000\u00f3\u00f4\u0003\u0012\t\u0000\u00f4"+ - "\u00f6\u0001\u0000\u0000\u0000\u00f5\u00f0\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f1\u0001\u0000\u0000\u0000\u00f6\u0011\u0001\u0000\u0000\u0000\u00f7"+ - "\u00f8\u0006\t\uffff\uffff\u0000\u00f8\u00fc\u0003\u0014\n\u0000\u00f9"+ - "\u00fa\u0007\u0000\u0000\u0000\u00fa\u00fc\u0003\u0012\t\u0003\u00fb\u00f7"+ - "\u0001\u0000\u0000\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0105"+ - "\u0001\u0000\u0000\u0000\u00fd\u00fe\n\u0002\u0000\u0000\u00fe\u00ff\u0007"+ - 
"\u0001\u0000\u0000\u00ff\u0104\u0003\u0012\t\u0003\u0100\u0101\n\u0001"+ - "\u0000\u0000\u0101\u0102\u0007\u0000\u0000\u0000\u0102\u0104\u0003\u0012"+ - "\t\u0002\u0103\u00fd\u0001\u0000\u0000\u0000\u0103\u0100\u0001\u0000\u0000"+ - "\u0000\u0104\u0107\u0001\u0000\u0000\u0000\u0105\u0103\u0001\u0000\u0000"+ - "\u0000\u0105\u0106\u0001\u0000\u0000\u0000\u0106\u0013\u0001\u0000\u0000"+ - "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u0109\u0006\n\uffff\uffff"+ - "\u0000\u0109\u0111\u0003D\"\u0000\u010a\u0111\u0003:\u001d\u0000\u010b"+ - "\u0111\u0003\u0016\u000b\u0000\u010c\u010d\u00050\u0000\u0000\u010d\u010e"+ - "\u0003\n\u0005\u0000\u010e\u010f\u00057\u0000\u0000\u010f\u0111\u0001"+ - "\u0000\u0000\u0000\u0110\u0108\u0001\u0000\u0000\u0000\u0110\u010a\u0001"+ - "\u0000\u0000\u0000\u0110\u010b\u0001\u0000\u0000\u0000\u0110\u010c\u0001"+ - "\u0000\u0000\u0000\u0111\u0117\u0001\u0000\u0000\u0000\u0112\u0113\n\u0001"+ - "\u0000\u0000\u0113\u0114\u0005&\u0000\u0000\u0114\u0116\u0003\u001a\r"+ - "\u0000\u0115\u0112\u0001\u0000\u0000\u0000\u0116\u0119\u0001\u0000\u0000"+ - "\u0000\u0117\u0115\u0001\u0000\u0000\u0000\u0117\u0118\u0001\u0000\u0000"+ - "\u0000\u0118\u0015\u0001\u0000\u0000\u0000\u0119\u0117\u0001\u0000\u0000"+ - "\u0000\u011a\u011b\u0003\u0018\f\u0000\u011b\u0125\u00050\u0000\u0000"+ - "\u011c\u0126\u0005B\u0000\u0000\u011d\u0122\u0003\n\u0005\u0000\u011e"+ - "\u011f\u0005\'\u0000\u0000\u011f\u0121\u0003\n\u0005\u0000\u0120\u011e"+ - "\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000\u0122\u0120"+ - "\u0001\u0000\u0000\u0000\u0122\u0123\u0001\u0000\u0000\u0000\u0123\u0126"+ - "\u0001\u0000\u0000\u0000\u0124\u0122\u0001\u0000\u0000\u0000\u0125\u011c"+ - "\u0001\u0000\u0000\u0000\u0125\u011d\u0001\u0000\u0000\u0000\u0125\u0126"+ - "\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000\u0127\u0128"+ - "\u00057\u0000\u0000\u0128\u0017\u0001\u0000\u0000\u0000\u0129\u012a\u0003"+ - "H$\u0000\u012a\u0019\u0001\u0000\u0000\u0000\u012b\u012c\u0003@ \u0000"+ - "\u012c\u001b\u0001\u0000\u0000\u0000\u012d\u012e\u0005\f\u0000\u0000\u012e"+ - "\u012f\u0003\u001e\u000f\u0000\u012f\u001d\u0001\u0000\u0000\u0000\u0130"+ - "\u0135\u0003 \u0010\u0000\u0131\u0132\u0005\'\u0000\u0000\u0132\u0134"+ - "\u0003 \u0010\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0137\u0001"+ - "\u0000\u0000\u0000\u0135\u0133\u0001\u0000\u0000\u0000\u0135\u0136\u0001"+ - "\u0000\u0000\u0000\u0136\u001f\u0001\u0000\u0000\u0000\u0137\u0135\u0001"+ - "\u0000\u0000\u0000\u0138\u0139\u0003:\u001d\u0000\u0139\u013a\u0005%\u0000"+ - "\u0000\u013a\u013c\u0001\u0000\u0000\u0000\u013b\u0138\u0001\u0000\u0000"+ - "\u0000\u013b\u013c\u0001\u0000\u0000\u0000\u013c\u013d\u0001\u0000\u0000"+ - "\u0000\u013d\u013e\u0003\n\u0005\u0000\u013e!\u0001\u0000\u0000\u0000"+ - "\u013f\u0140\u0005\u0006\u0000\u0000\u0140\u0145\u0003$\u0012\u0000\u0141"+ - "\u0142\u0005\'\u0000\u0000\u0142\u0144\u0003$\u0012\u0000\u0143\u0141"+ - "\u0001\u0000\u0000\u0000\u0144\u0147\u0001\u0000\u0000\u0000\u0145\u0143"+ - "\u0001\u0000\u0000\u0000\u0145\u0146\u0001\u0000\u0000\u0000\u0146\u0149"+ - "\u0001\u0000\u0000\u0000\u0147\u0145\u0001\u0000\u0000\u0000\u0148\u014a"+ - "\u0003*\u0015\u0000\u0149\u0148\u0001\u0000\u0000\u0000\u0149\u014a\u0001"+ - "\u0000\u0000\u0000\u014a#\u0001\u0000\u0000\u0000\u014b\u014c\u0003&\u0013"+ - "\u0000\u014c\u014d\u0005\u001d\u0000\u0000\u014d\u014f\u0001\u0000\u0000"+ - "\u0000\u014e\u014b\u0001\u0000\u0000\u0000\u014e\u014f\u0001\u0000\u0000"+ - 
"\u0000\u014f\u0150\u0001\u0000\u0000\u0000\u0150\u0151\u0003(\u0014\u0000"+ - "\u0151%\u0001\u0000\u0000\u0000\u0152\u0153\u0005Q\u0000\u0000\u0153\'"+ - "\u0001\u0000\u0000\u0000\u0154\u0155\u0007\u0002\u0000\u0000\u0155)\u0001"+ - "\u0000\u0000\u0000\u0156\u0159\u0003,\u0016\u0000\u0157\u0159\u0003.\u0017"+ - "\u0000\u0158\u0156\u0001\u0000\u0000\u0000\u0158\u0157\u0001\u0000\u0000"+ - "\u0000\u0159+\u0001\u0000\u0000\u0000\u015a\u015b\u0005P\u0000\u0000\u015b"+ - "\u0160\u0005Q\u0000\u0000\u015c\u015d\u0005\'\u0000\u0000\u015d\u015f"+ - "\u0005Q\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f\u0162\u0001"+ - "\u0000\u0000\u0000\u0160\u015e\u0001\u0000\u0000\u0000\u0160\u0161\u0001"+ - "\u0000\u0000\u0000\u0161-\u0001\u0000\u0000\u0000\u0162\u0160\u0001\u0000"+ - "\u0000\u0000\u0163\u0164\u0005F\u0000\u0000\u0164\u0165\u0003,\u0016\u0000"+ - "\u0165\u0166\u0005G\u0000\u0000\u0166/\u0001\u0000\u0000\u0000\u0167\u0168"+ - "\u0005\u0013\u0000\u0000\u0168\u016d\u0003$\u0012\u0000\u0169\u016a\u0005"+ - "\'\u0000\u0000\u016a\u016c\u0003$\u0012\u0000\u016b\u0169\u0001\u0000"+ - "\u0000\u0000\u016c\u016f\u0001\u0000\u0000\u0000\u016d\u016b\u0001\u0000"+ - "\u0000\u0000\u016d\u016e\u0001\u0000\u0000\u0000\u016e\u0171\u0001\u0000"+ - "\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170\u0172\u00036\u001b"+ - "\u0000\u0171\u0170\u0001\u0000\u0000\u0000\u0171\u0172\u0001\u0000\u0000"+ - "\u0000\u0172\u0175\u0001\u0000\u0000\u0000\u0173\u0174\u0005\"\u0000\u0000"+ - "\u0174\u0176\u0003\u001e\u000f\u0000\u0175\u0173\u0001\u0000\u0000\u0000"+ - "\u0175\u0176\u0001\u0000\u0000\u0000\u01761\u0001\u0000\u0000\u0000\u0177"+ - "\u0178\u0005\u0004\u0000\u0000\u0178\u0179\u0003\u001e\u000f\u0000\u0179"+ - "3\u0001\u0000\u0000\u0000\u017a\u017c\u0005\u000f\u0000\u0000\u017b\u017d"+ - "\u00036\u001b\u0000\u017c\u017b\u0001\u0000\u0000\u0000\u017c\u017d\u0001"+ - "\u0000\u0000\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ - "\"\u0000\u0000\u017f\u0181\u0003\u001e\u000f\u0000\u0180\u017e\u0001\u0000"+ - "\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000\u01815\u0001\u0000\u0000"+ - "\u0000\u0182\u0187\u00038\u001c\u0000\u0183\u0184\u0005\'\u0000\u0000"+ - "\u0184\u0186\u00038\u001c\u0000\u0185\u0183\u0001\u0000\u0000\u0000\u0186"+ - "\u0189\u0001\u0000\u0000\u0000\u0187\u0185\u0001\u0000\u0000\u0000\u0187"+ - "\u0188\u0001\u0000\u0000\u0000\u01887\u0001\u0000\u0000\u0000\u0189\u0187"+ - "\u0001\u0000\u0000\u0000\u018a\u018d\u0003 \u0010\u0000\u018b\u018c\u0005"+ - "\u0010\u0000\u0000\u018c\u018e\u0003\n\u0005\u0000\u018d\u018b\u0001\u0000"+ - "\u0000\u0000\u018d\u018e\u0001\u0000\u0000\u0000\u018e9\u0001\u0000\u0000"+ - "\u0000\u018f\u0194\u0003H$\u0000\u0190\u0191\u0005)\u0000\u0000\u0191"+ - "\u0193\u0003H$\u0000\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0196\u0001"+ - "\u0000\u0000\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0194\u0195\u0001"+ - "\u0000\u0000\u0000\u0195;\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000"+ - "\u0000\u0000\u0197\u019c\u0003B!\u0000\u0198\u0199\u0005)\u0000\u0000"+ - "\u0199\u019b\u0003B!\u0000\u019a\u0198\u0001\u0000\u0000\u0000\u019b\u019e"+ - "\u0001\u0000\u0000\u0000\u019c\u019a\u0001\u0000\u0000\u0000\u019c\u019d"+ - "\u0001\u0000\u0000\u0000\u019d=\u0001\u0000\u0000\u0000\u019e\u019c\u0001"+ - "\u0000\u0000\u0000\u019f\u01a4\u0003<\u001e\u0000\u01a0\u01a1\u0005\'"+ - "\u0000\u0000\u01a1\u01a3\u0003<\u001e\u0000\u01a2\u01a0\u0001\u0000\u0000"+ - "\u0000\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000"+ - 
"\u0000\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5?\u0001\u0000\u0000\u0000"+ - "\u01a6\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0007\u0003\u0000\u0000"+ - "\u01a8A\u0001\u0000\u0000\u0000\u01a9\u01ad\u0005U\u0000\u0000\u01aa\u01ab"+ - "\u0004!\u000b\u0000\u01ab\u01ad\u0003F#\u0000\u01ac\u01a9\u0001\u0000"+ - "\u0000\u0000\u01ac\u01aa\u0001\u0000\u0000\u0000\u01adC\u0001\u0000\u0000"+ - "\u0000\u01ae\u01d9\u00052\u0000\u0000\u01af\u01b0\u0003h4\u0000\u01b0"+ - "\u01b1\u0005H\u0000\u0000\u01b1\u01d9\u0001\u0000\u0000\u0000\u01b2\u01d9"+ - "\u0003f3\u0000\u01b3\u01d9\u0003h4\u0000\u01b4\u01d9\u0003b1\u0000\u01b5"+ - "\u01d9\u0003F#\u0000\u01b6\u01d9\u0003j5\u0000\u01b7\u01b8\u0005F\u0000"+ - "\u0000\u01b8\u01bd\u0003d2\u0000\u01b9\u01ba\u0005\'\u0000\u0000\u01ba"+ - "\u01bc\u0003d2\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bc\u01bf\u0001"+ - "\u0000\u0000\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01bd\u01be\u0001"+ - "\u0000\u0000\u0000\u01be\u01c0\u0001\u0000\u0000\u0000\u01bf\u01bd\u0001"+ - "\u0000\u0000\u0000\u01c0\u01c1\u0005G\u0000\u0000\u01c1\u01d9\u0001\u0000"+ - "\u0000\u0000\u01c2\u01c3\u0005F\u0000\u0000\u01c3\u01c8\u0003b1\u0000"+ - "\u01c4\u01c5\u0005\'\u0000\u0000\u01c5\u01c7\u0003b1\u0000\u01c6\u01c4"+ - "\u0001\u0000\u0000\u0000\u01c7\u01ca\u0001\u0000\u0000\u0000\u01c8\u01c6"+ - "\u0001\u0000\u0000\u0000\u01c8\u01c9\u0001\u0000\u0000\u0000\u01c9\u01cb"+ - "\u0001\u0000\u0000\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01cc"+ - "\u0005G\u0000\u0000\u01cc\u01d9\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005"+ - "F\u0000\u0000\u01ce\u01d3\u0003j5\u0000\u01cf\u01d0\u0005\'\u0000\u0000"+ - "\u01d0\u01d2\u0003j5\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d2\u01d5"+ - "\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d4"+ - "\u0001\u0000\u0000\u0000\u01d4\u01d6\u0001\u0000\u0000\u0000\u01d5\u01d3"+ - "\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005G\u0000\u0000\u01d7\u01d9\u0001"+ - "\u0000\u0000\u0000\u01d8\u01ae\u0001\u0000\u0000\u0000\u01d8\u01af\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b2\u0001\u0000\u0000\u0000\u01d8\u01b3\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b4\u0001\u0000\u0000\u0000\u01d8\u01b5\u0001"+ - "\u0000\u0000\u0000\u01d8\u01b6\u0001\u0000\u0000\u0000\u01d8\u01b7\u0001"+ - "\u0000\u0000\u0000\u01d8\u01c2\u0001\u0000\u0000\u0000\u01d8\u01cd\u0001"+ - "\u0000\u0000\u0000\u01d9E\u0001\u0000\u0000\u0000\u01da\u01dd\u00055\u0000"+ - "\u0000\u01db\u01dd\u0005E\u0000\u0000\u01dc\u01da\u0001\u0000\u0000\u0000"+ - "\u01dc\u01db\u0001\u0000\u0000\u0000\u01ddG\u0001\u0000\u0000\u0000\u01de"+ - "\u01e2\u0003@ \u0000\u01df\u01e0\u0004$\f\u0000\u01e0\u01e2\u0003F#\u0000"+ - "\u01e1\u01de\u0001\u0000\u0000\u0000\u01e1\u01df\u0001\u0000\u0000\u0000"+ - "\u01e2I\u0001\u0000\u0000\u0000\u01e3\u01e4\u0005\t\u0000\u0000\u01e4"+ - "\u01e5\u0005 \u0000\u0000\u01e5K\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005"+ - "\u000e\u0000\u0000\u01e7\u01ec\u0003N\'\u0000\u01e8\u01e9\u0005\'\u0000"+ - "\u0000\u01e9\u01eb\u0003N\'\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000"+ - "\u01eb\u01ee\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000"+ - "\u01ec\u01ed\u0001\u0000\u0000\u0000\u01edM\u0001\u0000\u0000\u0000\u01ee"+ - "\u01ec\u0001\u0000\u0000\u0000\u01ef\u01f1\u0003\n\u0005\u0000\u01f0\u01f2"+ - "\u0007\u0004\u0000\u0000\u01f1\u01f0\u0001\u0000\u0000\u0000\u01f1\u01f2"+ - "\u0001\u0000\u0000\u0000\u01f2\u01f5\u0001\u0000\u0000\u0000\u01f3\u01f4"+ - "\u00053\u0000\u0000\u01f4\u01f6\u0007\u0005\u0000\u0000\u01f5\u01f3\u0001"+ - 
"\u0000\u0000\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6O\u0001\u0000"+ - "\u0000\u0000\u01f7\u01f8\u0005\b\u0000\u0000\u01f8\u01f9\u0003>\u001f"+ - "\u0000\u01f9Q\u0001\u0000\u0000\u0000\u01fa\u01fb\u0005\u0002\u0000\u0000"+ - "\u01fb\u01fc\u0003>\u001f\u0000\u01fcS\u0001\u0000\u0000\u0000\u01fd\u01fe"+ - "\u0005\u000b\u0000\u0000\u01fe\u0203\u0003V+\u0000\u01ff\u0200\u0005\'"+ - "\u0000\u0000\u0200\u0202\u0003V+\u0000\u0201\u01ff\u0001\u0000\u0000\u0000"+ - "\u0202\u0205\u0001\u0000\u0000\u0000\u0203\u0201\u0001\u0000\u0000\u0000"+ - "\u0203\u0204\u0001\u0000\u0000\u0000\u0204U\u0001\u0000\u0000\u0000\u0205"+ - "\u0203\u0001\u0000\u0000\u0000\u0206\u0207\u0003<\u001e\u0000\u0207\u0208"+ - "\u0005Y\u0000\u0000\u0208\u0209\u0003<\u001e\u0000\u0209W\u0001\u0000"+ - "\u0000\u0000\u020a\u020b\u0005\u0001\u0000\u0000\u020b\u020c\u0003\u0014"+ - "\n\u0000\u020c\u020e\u0003j5\u0000\u020d\u020f\u0003^/\u0000\u020e\u020d"+ - "\u0001\u0000\u0000\u0000\u020e\u020f\u0001\u0000\u0000\u0000\u020fY\u0001"+ - "\u0000\u0000\u0000\u0210\u0211\u0005\u0007\u0000\u0000\u0211\u0212\u0003"+ - "\u0014\n\u0000\u0212\u0213\u0003j5\u0000\u0213[\u0001\u0000\u0000\u0000"+ - "\u0214\u0215\u0005\n\u0000\u0000\u0215\u0216\u0003:\u001d\u0000\u0216"+ - "]\u0001\u0000\u0000\u0000\u0217\u021c\u0003`0\u0000\u0218\u0219\u0005"+ - "\'\u0000\u0000\u0219\u021b\u0003`0\u0000\u021a\u0218\u0001\u0000\u0000"+ - "\u0000\u021b\u021e\u0001\u0000\u0000\u0000\u021c\u021a\u0001\u0000\u0000"+ - "\u0000\u021c\u021d\u0001\u0000\u0000\u0000\u021d_\u0001\u0000\u0000\u0000"+ - "\u021e\u021c\u0001\u0000\u0000\u0000\u021f\u0220\u0003@ \u0000\u0220\u0221"+ - "\u0005%\u0000\u0000\u0221\u0222\u0003D\"\u0000\u0222a\u0001\u0000\u0000"+ - "\u0000\u0223\u0224\u0007\u0006\u0000\u0000\u0224c\u0001\u0000\u0000\u0000"+ - "\u0225\u0228\u0003f3\u0000\u0226\u0228\u0003h4\u0000\u0227\u0225\u0001"+ - "\u0000\u0000\u0000\u0227\u0226\u0001\u0000\u0000\u0000\u0228e\u0001\u0000"+ - "\u0000\u0000\u0229\u022b\u0007\u0000\u0000\u0000\u022a\u0229\u0001\u0000"+ - "\u0000\u0000\u022a\u022b\u0001\u0000\u0000\u0000\u022b\u022c\u0001\u0000"+ - "\u0000\u0000\u022c\u022d\u0005!\u0000\u0000\u022dg\u0001\u0000\u0000\u0000"+ - "\u022e\u0230\u0007\u0000\u0000\u0000\u022f\u022e\u0001\u0000\u0000\u0000"+ - "\u022f\u0230\u0001\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000"+ - "\u0231\u0232\u0005 \u0000\u0000\u0232i\u0001\u0000\u0000\u0000\u0233\u0234"+ - "\u0005\u001f\u0000\u0000\u0234k\u0001\u0000\u0000\u0000\u0235\u0236\u0007"+ - "\u0007\u0000\u0000\u0236m\u0001\u0000\u0000\u0000\u0237\u0238\u0005\u0005"+ - "\u0000\u0000\u0238\u0239\u0003p8\u0000\u0239o\u0001\u0000\u0000\u0000"+ - "\u023a\u023b\u0005F\u0000\u0000\u023b\u023c\u0003\u0002\u0001\u0000\u023c"+ - "\u023d\u0005G\u0000\u0000\u023dq\u0001\u0000\u0000\u0000\u023e\u023f\u0005"+ - "\r\u0000\u0000\u023f\u0240\u0005i\u0000\u0000\u0240s\u0001\u0000\u0000"+ - "\u0000\u0241\u0242\u0005\u0003\u0000\u0000\u0242\u0245\u0005_\u0000\u0000"+ - "\u0243\u0244\u0005]\u0000\u0000\u0244\u0246\u0003<\u001e\u0000\u0245\u0243"+ - "\u0001\u0000\u0000\u0000\u0245\u0246\u0001\u0000\u0000\u0000\u0246\u0250"+ - "\u0001\u0000\u0000\u0000\u0247\u0248\u0005^\u0000\u0000\u0248\u024d\u0003"+ - "v;\u0000\u0249\u024a\u0005\'\u0000\u0000\u024a\u024c\u0003v;\u0000\u024b"+ - "\u0249\u0001\u0000\u0000\u0000\u024c\u024f\u0001\u0000\u0000\u0000\u024d"+ - "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ - "\u0251\u0001\u0000\u0000\u0000\u024f\u024d\u0001\u0000\u0000\u0000\u0250"+ - 
"\u0247\u0001\u0000\u0000\u0000\u0250\u0251\u0001\u0000\u0000\u0000\u0251"+ - "u\u0001\u0000\u0000\u0000\u0252\u0253\u0003<\u001e\u0000\u0253\u0254\u0005"+ - "%\u0000\u0000\u0254\u0256\u0001\u0000\u0000\u0000\u0255\u0252\u0001\u0000"+ - "\u0000\u0000\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0001\u0000"+ - "\u0000\u0000\u0257\u0258\u0003<\u001e\u0000\u0258w\u0001\u0000\u0000\u0000"+ - "\u0259\u025a\u0005\u0012\u0000\u0000\u025a\u025b\u0003$\u0012\u0000\u025b"+ - "\u025c\u0005]\u0000\u0000\u025c\u025d\u0003>\u001f\u0000\u025dy\u0001"+ - "\u0000\u0000\u0000\u025e\u025f\u0005\u0011\u0000\u0000\u025f\u0262\u0003"+ - "6\u001b\u0000\u0260\u0261\u0005\"\u0000\u0000\u0261\u0263\u0003\u001e"+ - "\u000f\u0000\u0262\u0260\u0001\u0000\u0000\u0000\u0262\u0263\u0001\u0000"+ - "\u0000\u0000\u0263{\u0001\u0000\u0000\u0000\u0264\u0266\u0007\b\u0000"+ - "\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000"+ - "\u0000\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0005\u0014\u0000"+ - "\u0000\u0268\u0269\u0003~?\u0000\u0269\u026a\u0003\u0080@\u0000\u026a"+ - "}\u0001\u0000\u0000\u0000\u026b\u026e\u0003@ \u0000\u026c\u026d\u0005"+ - "Y\u0000\u0000\u026d\u026f\u0003@ \u0000\u026e\u026c\u0001\u0000\u0000"+ - "\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u007f\u0001\u0000\u0000"+ - "\u0000\u0270\u0271\u0005]\u0000\u0000\u0271\u0276\u0003\u0082A\u0000\u0272"+ - "\u0273\u0005\'\u0000\u0000\u0273\u0275\u0003\u0082A\u0000\u0274\u0272"+ - "\u0001\u0000\u0000\u0000\u0275\u0278\u0001\u0000\u0000\u0000\u0276\u0274"+ - "\u0001\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0081"+ - "\u0001\u0000\u0000\u0000\u0278\u0276\u0001\u0000\u0000\u0000\u0279\u027a"+ - "\u0003\u0010\b\u0000\u027a\u0083\u0001\u0000\u0000\u0000=\u008f\u0098"+ - "\u00ac\u00b8\u00c1\u00c9\u00cf\u00d7\u00d9\u00de\u00e5\u00ea\u00f5\u00fb"+ - "\u0103\u0105\u0110\u0117\u0122\u0125\u0135\u013b\u0145\u0149\u014e\u0158"+ - "\u0160\u016d\u0171\u0175\u017c\u0180\u0187\u018d\u0194\u019c\u01a4\u01ac"+ - "\u01bd\u01c8\u01d3\u01d8\u01dc\u01e1\u01ec\u01f1\u01f5\u0203\u020e\u021c"+ - "\u0227\u022a\u022f\u0245\u024d\u0250\u0255\u0262\u0265\u026e\u0276"; + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ + "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00f5\b\b\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0003\t\u00fb\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0005\t\u0103\b\t\n\t\f\t\u0106\t\t\u0001\n\u0001\n\u0001\n\u0001\n"+ + "\u0001\n\u0001\n\u0001\n\u0001\n\u0003\n\u0110\b\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t\n\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0005\u000b\u0120\b\u000b\n\u000b"+ + "\f\u000b\u0123\t\u000b\u0003\u000b\u0125\b\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\f\u0001\f\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0133\b\u000f\n\u000f\f\u000f"+ + "\u0136\t\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0003\u0010\u013b\b"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ + 
"\u0011\u0005\u0011\u0143\b\u0011\n\u0011\f\u0011\u0146\t\u0011\u0001\u0011"+ + "\u0003\u0011\u0149\b\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012"+ + "\u014e\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0015\u0001\u0015\u0003\u0015\u0158\b\u0015\u0001\u0016"+ + "\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u015e\b\u0016\n\u0016"+ + "\f\u0016\u0161\t\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u016b\b\u0018"+ + "\n\u0018\f\u0018\u016e\t\u0018\u0001\u0018\u0003\u0018\u0171\b\u0018\u0001"+ + "\u0018\u0001\u0018\u0003\u0018\u0175\b\u0018\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u017c\b\u001a\u0001\u001a\u0001"+ + "\u001a\u0003\u001a\u0180\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0005"+ + "\u001b\u0185\b\u001b\n\u001b\f\u001b\u0188\t\u001b\u0001\u001c\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u018d\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d"+ + "\u0005\u001d\u0192\b\u001d\n\u001d\f\u001d\u0195\t\u001d\u0001\u001e\u0001"+ + "\u001e\u0001\u001e\u0005\u001e\u019a\b\u001e\n\u001e\f\u001e\u019d\t\u001e"+ + "\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01a2\b\u001f\n\u001f"+ + "\f\u001f\u01a5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001!\u0003!\u01ac"+ + "\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01bb\b\"\n\"\f\"\u01be\t\""+ + "\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01c6\b\"\n\""+ + "\f\"\u01c9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\""+ + "\u01d1\b\"\n\"\f\"\u01d4\t\"\u0001\"\u0001\"\u0003\"\u01d8\b\"\u0001#"+ + "\u0001#\u0003#\u01dc\b#\u0001$\u0001$\u0001$\u0003$\u01e1\b$\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01ea\b&\n&\f&\u01ed\t&\u0001"+ + "\'\u0001\'\u0003\'\u01f1\b\'\u0001\'\u0001\'\u0003\'\u01f5\b\'\u0001("+ + "\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001*\u0001*\u0005"+ + "*\u0201\b*\n*\f*\u0204\t*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ + ",\u0001,\u0003,\u020e\b,\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001"+ + ".\u0001/\u0001/\u0001/\u0005/\u021a\b/\n/\f/\u021d\t/\u00010\u00010\u0001"+ + "0\u00010\u00011\u00011\u00012\u00012\u00032\u0227\b2\u00013\u00033\u022a"+ + "\b3\u00013\u00013\u00014\u00034\u022f\b4\u00014\u00014\u00015\u00015\u0001"+ + "6\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00019\u0001"+ + "9\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0245\b:\u0001:\u0001:\u0001"+ + ":\u0001:\u0005:\u024b\b:\n:\f:\u024e\t:\u0003:\u0250\b:\u0001;\u0001;"+ + "\u0001;\u0003;\u0255\b;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001"+ + "<\u0001=\u0001=\u0001=\u0001=\u0003=\u0262\b=\u0001>\u0003>\u0265\b>\u0001"+ + ">\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u026e\b?\u0001@\u0001"+ + "@\u0001@\u0001@\u0005@\u0274\b@\n@\f@\u0277\t@\u0001A\u0001A\u0001A\u0000"+ + "\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000@A\u0001\u0000"+ + "BD\u0002\u0000\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000"+ + "++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u0294\u0000"+ + "\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000\u0000\u0000\u0004"+ + "\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000\u0000\u0000\b\u00ae"+ + 
"\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000\u0000\f\u00e9\u0001"+ + "\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000\u0010\u00f4\u0001"+ + "\u0000\u0000\u0000\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001"+ + "\u0000\u0000\u0000\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001"+ + "\u0000\u0000\u0000\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001"+ + "\u0000\u0000\u0000\u001e\u012f\u0001\u0000\u0000\u0000 \u013a\u0001\u0000"+ + "\u0000\u0000\"\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000"+ + "\u0000&\u0151\u0001\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*"+ + "\u0157\u0001\u0000\u0000\u0000,\u0159\u0001\u0000\u0000\u0000.\u0162\u0001"+ + "\u0000\u0000\u00000\u0166\u0001\u0000\u0000\u00002\u0176\u0001\u0000\u0000"+ + "\u00004\u0179\u0001\u0000\u0000\u00006\u0181\u0001\u0000\u0000\u00008"+ + "\u0189\u0001\u0000\u0000\u0000:\u018e\u0001\u0000\u0000\u0000<\u0196\u0001"+ + "\u0000\u0000\u0000>\u019e\u0001\u0000\u0000\u0000@\u01a6\u0001\u0000\u0000"+ + "\u0000B\u01ab\u0001\u0000\u0000\u0000D\u01d7\u0001\u0000\u0000\u0000F"+ + "\u01db\u0001\u0000\u0000\u0000H\u01e0\u0001\u0000\u0000\u0000J\u01e2\u0001"+ + "\u0000\u0000\u0000L\u01e5\u0001\u0000\u0000\u0000N\u01ee\u0001\u0000\u0000"+ + "\u0000P\u01f6\u0001\u0000\u0000\u0000R\u01f9\u0001\u0000\u0000\u0000T"+ + "\u01fc\u0001\u0000\u0000\u0000V\u0205\u0001\u0000\u0000\u0000X\u0209\u0001"+ + "\u0000\u0000\u0000Z\u020f\u0001\u0000\u0000\u0000\\\u0213\u0001\u0000"+ + "\u0000\u0000^\u0216\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000"+ + "b\u0222\u0001\u0000\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u0229"+ + "\u0001\u0000\u0000\u0000h\u022e\u0001\u0000\u0000\u0000j\u0232\u0001\u0000"+ + "\u0000\u0000l\u0234\u0001\u0000\u0000\u0000n\u0236\u0001\u0000\u0000\u0000"+ + "p\u0239\u0001\u0000\u0000\u0000r\u023d\u0001\u0000\u0000\u0000t\u0240"+ + "\u0001\u0000\u0000\u0000v\u0254\u0001\u0000\u0000\u0000x\u0258\u0001\u0000"+ + "\u0000\u0000z\u025d\u0001\u0000\u0000\u0000|\u0264\u0001\u0000\u0000\u0000"+ + "~\u026a\u0001\u0000\u0000\u0000\u0080\u026f\u0001\u0000\u0000\u0000\u0082"+ + "\u0278\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001\u0000\u0085"+ + "\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000\u0000\u0087"+ + "\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004\u0002\u0000"+ + "\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000\u0000\u008b"+ + "\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003\u0000\u008d"+ + "\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000\u0000\u008f"+ + "\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090"+ + "\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000\u0000\u0092"+ + "\u0099\u0003n7\u0000\u0093\u0099\u0003\"\u0011\u0000\u0094\u0099\u0003"+ + "\u001c\u000e\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004\u0002\u0001"+ + "\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000\u0000\u0000"+ + "\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000\u0000\u0000"+ + "\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000"+ + "\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019\u0000\u009b"+ + "\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d\u00ad\u0003"+ + "J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003L&\u0000\u00a0"+ + "\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad\u0003X,\u0000"+ + "\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5\u00ad\u0003"+ + 
"\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad\u0003z=\u0000"+ + "\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x<\u0000\u00aa\u00ab"+ + "\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000\u00ac\u009a\u0001\u0000"+ + "\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000\u00ac\u009c\u0001\u0000"+ + "\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000\u00ac\u009e\u0001\u0000"+ + "\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000\u00ac\u00a0\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000\u00ac\u00a2\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000\u00ac\u00a4\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000\u00ac\u00a6\u0001\u0000"+ + "\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001\u0000"+ + "\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000\u00ae\u00af\u0005\u0010"+ + "\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0\t\u0001\u0000\u0000"+ + "\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2\u00b3\u00051\u0000"+ + "\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003\u0010\b\u0000\u00b5"+ + "\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010\b\u0000\u00b7\u00b9"+ + "\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000\u0000\u00b8\u00b9\u0001"+ + "\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00bb\u0005"+ + ",\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc\u00c1\u0003\u0010\b"+ + "\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0\u0003\u0010\b\u0000"+ + "\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001\u0000\u0000\u0000"+ + "\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001\u0000\u0000\u0000"+ + "\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000\u0000\u0000"+ + "\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000\u0000\u00c8\u00ca"+ + "\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000\u00c9\u00ca\u0001"+ + "\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005"+ + "2\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd\u00cf\u0003\u000e"+ + "\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce\u00b4\u0001\u0000"+ + "\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce\u00b6\u0001\u0000"+ + "\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce\u00cd\u0001\u0000"+ + "\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0\u00d1\n\u0005\u0000"+ + "\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7\u0003\n\u0005\u0006"+ + "\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u00054\u0000\u0000\u00d5"+ + "\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000\u0000\u0000\u00d6\u00d3"+ + "\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000\u0000\u0000\u00d8\u00d6"+ + "\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000\u0000\u0000\u00d9\u000b"+ + "\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000\u0000\u0000\u00db\u00dd"+ + "\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000\u0000\u00dd\u00dc\u0001"+ + "\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0001"+ + "\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000\u00e0\u00e1\u0003j5"+ + "\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4\u0003\u0010\b\u0000"+ + "\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001\u0000\u0000\u0000\u00e4"+ + "\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001\u0000\u0000\u0000\u00e6"+ + "\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5\u0000\u00e8\u00ea\u0001"+ + "\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000\u0000\u00e9\u00e2\u0001"+ + 
"\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000\u00eb\u00ec\u0003:"+ + "\u001d\u0000\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003D\"\u0000"+ + "\u00ee\u000f\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0"+ + "\u00f1\u0003\u0012\t\u0000\u00f1\u00f2\u0003l6\u0000\u00f2\u00f3\u0003"+ + "\u0012\t\u0000\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000"+ + "\u0000\u0000\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000"+ + "\u0000\u0000\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014"+ + "\n\u0000\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t"+ + "\u0003\u00fa\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000"+ + "\u0000\u00fb\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000"+ + "\u00fd\u00fe\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff"+ + "\u0100\n\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103"+ + "\u0003\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001"+ + "\u0000\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001"+ + "\u0000\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001"+ + "\u0000\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006"+ + "\n\uffff\uffff\u0000\u0108\u0110\u0003D\"\u0000\u0109\u0110\u0003:\u001d"+ + "\u0000\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000"+ + "\u010c\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e"+ + "\u0110\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f"+ + "\u0109\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f"+ + "\u010b\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111"+ + "\u0112\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115"+ + "\u0003\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ + "\u0000\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050"+ + "\u0000\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005"+ + "\u0000\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000"+ + "\u011f\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000"+ + "\u0121\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000"+ + "\u0122\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000"+ + "\u0124\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000"+ + "\u0124\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000"+ + "\u0126\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128"+ + "\u0129\u0003H$\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ + "@ \u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f\u0000"+ + "\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000\u0000"+ + "\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000\u0000"+ + "\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000\u0133"+ + "\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000\u0134"+ + "\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000\u0136"+ + "\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u0003:\u001d\u0000\u0138\u0139"+ + "\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137\u0001"+ + "\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c\u0001"+ + 
"\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001\u0000"+ + "\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003$\u0012"+ + "\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012\u0000"+ + "\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000\u0000"+ + "\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000\u0000"+ + "\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000\u0000"+ + "\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000\u0148"+ + "\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a\u014b"+ + "\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001\u0000"+ + "\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001\u0000"+ + "\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003(\u0014"+ + "\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000\u0000\u0152"+ + "\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000\u0154)"+ + "\u0001\u0000\u0000\u0000\u0155\u0158\u0003,\u0016\u0000\u0156\u0158\u0003"+ + ".\u0017\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0157\u0156\u0001\u0000"+ + "\u0000\u0000\u0158+\u0001\u0000\u0000\u0000\u0159\u015a\u0005P\u0000\u0000"+ + "\u015a\u015f\u0005Q\u0000\u0000\u015b\u015c\u0005\'\u0000\u0000\u015c"+ + "\u015e\u0005Q\u0000\u0000\u015d\u015b\u0001\u0000\u0000\u0000\u015e\u0161"+ + "\u0001\u0000\u0000\u0000\u015f\u015d\u0001\u0000\u0000\u0000\u015f\u0160"+ + "\u0001\u0000\u0000\u0000\u0160-\u0001\u0000\u0000\u0000\u0161\u015f\u0001"+ + "\u0000\u0000\u0000\u0162\u0163\u0005F\u0000\u0000\u0163\u0164\u0003,\u0016"+ + "\u0000\u0164\u0165\u0005G\u0000\u0000\u0165/\u0001\u0000\u0000\u0000\u0166"+ + "\u0167\u0005\u0013\u0000\u0000\u0167\u016c\u0003$\u0012\u0000\u0168\u0169"+ + "\u0005\'\u0000\u0000\u0169\u016b\u0003$\u0012\u0000\u016a\u0168\u0001"+ + "\u0000\u0000\u0000\u016b\u016e\u0001\u0000\u0000\u0000\u016c\u016a\u0001"+ + "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d\u0170\u0001"+ + "\u0000\u0000\u0000\u016e\u016c\u0001\u0000\u0000\u0000\u016f\u0171\u0003"+ + "6\u001b\u0000\u0170\u016f\u0001\u0000\u0000\u0000\u0170\u0171\u0001\u0000"+ + "\u0000\u0000\u0171\u0174\u0001\u0000\u0000\u0000\u0172\u0173\u0005!\u0000"+ + "\u0000\u0173\u0175\u0003\u001e\u000f\u0000\u0174\u0172\u0001\u0000\u0000"+ + "\u0000\u0174\u0175\u0001\u0000\u0000\u0000\u01751\u0001\u0000\u0000\u0000"+ + "\u0176\u0177\u0005\u0004\u0000\u0000\u0177\u0178\u0003\u001e\u000f\u0000"+ + "\u01783\u0001\u0000\u0000\u0000\u0179\u017b\u0005\u000f\u0000\u0000\u017a"+ + "\u017c\u00036\u001b\u0000\u017b\u017a\u0001\u0000\u0000\u0000\u017b\u017c"+ + "\u0001\u0000\u0000\u0000\u017c\u017f\u0001\u0000\u0000\u0000\u017d\u017e"+ + "\u0005!\u0000\u0000\u017e\u0180\u0003\u001e\u000f\u0000\u017f\u017d\u0001"+ + "\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000\u01805\u0001\u0000"+ + "\u0000\u0000\u0181\u0186\u00038\u001c\u0000\u0182\u0183\u0005\'\u0000"+ + "\u0000\u0183\u0185\u00038\u001c\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + "\u0185\u0188\u0001\u0000\u0000\u0000\u0186\u0184\u0001\u0000\u0000\u0000"+ + "\u0186\u0187\u0001\u0000\u0000\u0000\u01877\u0001\u0000\u0000\u0000\u0188"+ + "\u0186\u0001\u0000\u0000\u0000\u0189\u018c\u0003 \u0010\u0000\u018a\u018b"+ + "\u0005\u0010\u0000\u0000\u018b\u018d\u0003\n\u0005\u0000\u018c\u018a\u0001"+ + "\u0000\u0000\u0000\u018c\u018d\u0001\u0000\u0000\u0000\u018d9\u0001\u0000"+ + "\u0000\u0000\u018e\u0193\u0003H$\u0000\u018f\u0190\u0005)\u0000\u0000"+ + 
"\u0190\u0192\u0003H$\u0000\u0191\u018f\u0001\u0000\u0000\u0000\u0192\u0195"+ + "\u0001\u0000\u0000\u0000\u0193\u0191\u0001\u0000\u0000\u0000\u0193\u0194"+ + "\u0001\u0000\u0000\u0000\u0194;\u0001\u0000\u0000\u0000\u0195\u0193\u0001"+ + "\u0000\u0000\u0000\u0196\u019b\u0003B!\u0000\u0197\u0198\u0005)\u0000"+ + "\u0000\u0198\u019a\u0003B!\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u019a"+ + "\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001\u0000\u0000\u0000\u019b"+ + "\u019c\u0001\u0000\u0000\u0000\u019c=\u0001\u0000\u0000\u0000\u019d\u019b"+ + "\u0001\u0000\u0000\u0000\u019e\u01a3\u0003<\u001e\u0000\u019f\u01a0\u0005"+ + "\'\u0000\u0000\u01a0\u01a2\u0003<\u001e\u0000\u01a1\u019f\u0001\u0000"+ + "\u0000\u0000\u01a2\u01a5\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000"+ + "\u0000\u0000\u01a3\u01a4\u0001\u0000\u0000\u0000\u01a4?\u0001\u0000\u0000"+ + "\u0000\u01a5\u01a3\u0001\u0000\u0000\u0000\u01a6\u01a7\u0007\u0003\u0000"+ + "\u0000\u01a7A\u0001\u0000\u0000\u0000\u01a8\u01ac\u0005U\u0000\u0000\u01a9"+ + "\u01aa\u0004!\n\u0000\u01aa\u01ac\u0003F#\u0000\u01ab\u01a8\u0001\u0000"+ + "\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01acC\u0001\u0000\u0000"+ + "\u0000\u01ad\u01d8\u00052\u0000\u0000\u01ae\u01af\u0003h4\u0000\u01af"+ + "\u01b0\u0005H\u0000\u0000\u01b0\u01d8\u0001\u0000\u0000\u0000\u01b1\u01d8"+ + "\u0003f3\u0000\u01b2\u01d8\u0003h4\u0000\u01b3\u01d8\u0003b1\u0000\u01b4"+ + "\u01d8\u0003F#\u0000\u01b5\u01d8\u0003j5\u0000\u01b6\u01b7\u0005F\u0000"+ + "\u0000\u01b7\u01bc\u0003d2\u0000\u01b8\u01b9\u0005\'\u0000\u0000\u01b9"+ + "\u01bb\u0003d2\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01be\u0001"+ + "\u0000\u0000\u0000\u01bc\u01ba\u0001\u0000\u0000\u0000\u01bc\u01bd\u0001"+ + "\u0000\u0000\u0000\u01bd\u01bf\u0001\u0000\u0000\u0000\u01be\u01bc\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0005G\u0000\u0000\u01c0\u01d8\u0001\u0000"+ + "\u0000\u0000\u01c1\u01c2\u0005F\u0000\u0000\u01c2\u01c7\u0003b1\u0000"+ + "\u01c3\u01c4\u0005\'\u0000\u0000\u01c4\u01c6\u0003b1\u0000\u01c5\u01c3"+ + "\u0001\u0000\u0000\u0000\u01c6\u01c9\u0001\u0000\u0000\u0000\u01c7\u01c5"+ + "\u0001\u0000\u0000\u0000\u01c7\u01c8\u0001\u0000\u0000\u0000\u01c8\u01ca"+ + "\u0001\u0000\u0000\u0000\u01c9\u01c7\u0001\u0000\u0000\u0000\u01ca\u01cb"+ + "\u0005G\u0000\u0000\u01cb\u01d8\u0001\u0000\u0000\u0000\u01cc\u01cd\u0005"+ + "F\u0000\u0000\u01cd\u01d2\u0003j5\u0000\u01ce\u01cf\u0005\'\u0000\u0000"+ + "\u01cf\u01d1\u0003j5\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d4"+ + "\u0001\u0000\u0000\u0000\u01d2\u01d0\u0001\u0000\u0000\u0000\u01d2\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d2"+ + "\u0001\u0000\u0000\u0000\u01d5\u01d6\u0005G\u0000\u0000\u01d6\u01d8\u0001"+ + "\u0000\u0000\u0000\u01d7\u01ad\u0001\u0000\u0000\u0000\u01d7\u01ae\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b1\u0001\u0000\u0000\u0000\u01d7\u01b2\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b3\u0001\u0000\u0000\u0000\u01d7\u01b4\u0001"+ + "\u0000\u0000\u0000\u01d7\u01b5\u0001\u0000\u0000\u0000\u01d7\u01b6\u0001"+ + "\u0000\u0000\u0000\u01d7\u01c1\u0001\u0000\u0000\u0000\u01d7\u01cc\u0001"+ + "\u0000\u0000\u0000\u01d8E\u0001\u0000\u0000\u0000\u01d9\u01dc\u00055\u0000"+ + "\u0000\u01da\u01dc\u0005E\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000"+ + "\u01db\u01da\u0001\u0000\u0000\u0000\u01dcG\u0001\u0000\u0000\u0000\u01dd"+ + "\u01e1\u0003@ \u0000\u01de\u01df\u0004$\u000b\u0000\u01df\u01e1\u0003"+ + "F#\u0000\u01e0\u01dd\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000\u0000"+ + 
"\u0000\u01e1I\u0001\u0000\u0000\u0000\u01e2\u01e3\u0005\t\u0000\u0000"+ + "\u01e3\u01e4\u0005\u001f\u0000\u0000\u01e4K\u0001\u0000\u0000\u0000\u01e5"+ + "\u01e6\u0005\u000e\u0000\u0000\u01e6\u01eb\u0003N\'\u0000\u01e7\u01e8"+ + "\u0005\'\u0000\u0000\u01e8\u01ea\u0003N\'\u0000\u01e9\u01e7\u0001\u0000"+ + "\u0000\u0000\u01ea\u01ed\u0001\u0000\u0000\u0000\u01eb\u01e9\u0001\u0000"+ + "\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ecM\u0001\u0000\u0000"+ + "\u0000\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ee\u01f0\u0003\n\u0005\u0000"+ + "\u01ef\u01f1\u0007\u0004\u0000\u0000\u01f0\u01ef\u0001\u0000\u0000\u0000"+ + "\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f4\u0001\u0000\u0000\u0000"+ + "\u01f2\u01f3\u00053\u0000\u0000\u01f3\u01f5\u0007\u0005\u0000\u0000\u01f4"+ + "\u01f2\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5"+ + "O\u0001\u0000\u0000\u0000\u01f6\u01f7\u0005\b\u0000\u0000\u01f7\u01f8"+ + "\u0003>\u001f\u0000\u01f8Q\u0001\u0000\u0000\u0000\u01f9\u01fa\u0005\u0002"+ + "\u0000\u0000\u01fa\u01fb\u0003>\u001f\u0000\u01fbS\u0001\u0000\u0000\u0000"+ + "\u01fc\u01fd\u0005\u000b\u0000\u0000\u01fd\u0202\u0003V+\u0000\u01fe\u01ff"+ + "\u0005\'\u0000\u0000\u01ff\u0201\u0003V+\u0000\u0200\u01fe\u0001\u0000"+ + "\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202\u0200\u0001\u0000"+ + "\u0000\u0000\u0202\u0203\u0001\u0000\u0000\u0000\u0203U\u0001\u0000\u0000"+ + "\u0000\u0204\u0202\u0001\u0000\u0000\u0000\u0205\u0206\u0003<\u001e\u0000"+ + "\u0206\u0207\u0005Y\u0000\u0000\u0207\u0208\u0003<\u001e\u0000\u0208W"+ + "\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0001\u0000\u0000\u020a\u020b"+ + "\u0003\u0014\n\u0000\u020b\u020d\u0003j5\u0000\u020c\u020e\u0003^/\u0000"+ + "\u020d\u020c\u0001\u0000\u0000\u0000\u020d\u020e\u0001\u0000\u0000\u0000"+ + "\u020eY\u0001\u0000\u0000\u0000\u020f\u0210\u0005\u0007\u0000\u0000\u0210"+ + "\u0211\u0003\u0014\n\u0000\u0211\u0212\u0003j5\u0000\u0212[\u0001\u0000"+ + "\u0000\u0000\u0213\u0214\u0005\n\u0000\u0000\u0214\u0215\u0003:\u001d"+ + "\u0000\u0215]\u0001\u0000\u0000\u0000\u0216\u021b\u0003`0\u0000\u0217"+ + "\u0218\u0005\'\u0000\u0000\u0218\u021a\u0003`0\u0000\u0219\u0217\u0001"+ + "\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b\u0219\u0001"+ + "\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c_\u0001\u0000"+ + "\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e\u021f\u0003@ \u0000"+ + "\u021f\u0220\u0005$\u0000\u0000\u0220\u0221\u0003D\"\u0000\u0221a\u0001"+ + "\u0000\u0000\u0000\u0222\u0223\u0007\u0006\u0000\u0000\u0223c\u0001\u0000"+ + "\u0000\u0000\u0224\u0227\u0003f3\u0000\u0225\u0227\u0003h4\u0000\u0226"+ + "\u0224\u0001\u0000\u0000\u0000\u0226\u0225\u0001\u0000\u0000\u0000\u0227"+ + "e\u0001\u0000\u0000\u0000\u0228\u022a\u0007\u0000\u0000\u0000\u0229\u0228"+ + "\u0001\u0000\u0000\u0000\u0229\u022a\u0001\u0000\u0000\u0000\u022a\u022b"+ + "\u0001\u0000\u0000\u0000\u022b\u022c\u0005 \u0000\u0000\u022cg\u0001\u0000"+ + "\u0000\u0000\u022d\u022f\u0007\u0000\u0000\u0000\u022e\u022d\u0001\u0000"+ + "\u0000\u0000\u022e\u022f\u0001\u0000\u0000\u0000\u022f\u0230\u0001\u0000"+ + "\u0000\u0000\u0230\u0231\u0005\u001f\u0000\u0000\u0231i\u0001\u0000\u0000"+ + "\u0000\u0232\u0233\u0005\u001e\u0000\u0000\u0233k\u0001\u0000\u0000\u0000"+ + "\u0234\u0235\u0007\u0007\u0000\u0000\u0235m\u0001\u0000\u0000\u0000\u0236"+ + "\u0237\u0005\u0005\u0000\u0000\u0237\u0238\u0003p8\u0000\u0238o\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0005F\u0000\u0000\u023a\u023b\u0003\u0002"+ + 
"\u0001\u0000\u023b\u023c\u0005G\u0000\u0000\u023cq\u0001\u0000\u0000\u0000"+ + "\u023d\u023e\u0005\r\u0000\u0000\u023e\u023f\u0005i\u0000\u0000\u023f"+ + "s\u0001\u0000\u0000\u0000\u0240\u0241\u0005\u0003\u0000\u0000\u0241\u0244"+ + "\u0005_\u0000\u0000\u0242\u0243\u0005]\u0000\u0000\u0243\u0245\u0003<"+ + "\u001e\u0000\u0244\u0242\u0001\u0000\u0000\u0000\u0244\u0245\u0001\u0000"+ + "\u0000\u0000\u0245\u024f\u0001\u0000\u0000\u0000\u0246\u0247\u0005^\u0000"+ + "\u0000\u0247\u024c\u0003v;\u0000\u0248\u0249\u0005\'\u0000\u0000\u0249"+ + "\u024b\u0003v;\u0000\u024a\u0248\u0001\u0000\u0000\u0000\u024b\u024e\u0001"+ + "\u0000\u0000\u0000\u024c\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001"+ + "\u0000\u0000\u0000\u024d\u0250\u0001\u0000\u0000\u0000\u024e\u024c\u0001"+ + "\u0000\u0000\u0000\u024f\u0246\u0001\u0000\u0000\u0000\u024f\u0250\u0001"+ + "\u0000\u0000\u0000\u0250u\u0001\u0000\u0000\u0000\u0251\u0252\u0003<\u001e"+ + "\u0000\u0252\u0253\u0005$\u0000\u0000\u0253\u0255\u0001\u0000\u0000\u0000"+ + "\u0254\u0251\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000\u0000"+ + "\u0255\u0256\u0001\u0000\u0000\u0000\u0256\u0257\u0003<\u001e\u0000\u0257"+ + "w\u0001\u0000\u0000\u0000\u0258\u0259\u0005\u0012\u0000\u0000\u0259\u025a"+ + "\u0003$\u0012\u0000\u025a\u025b\u0005]\u0000\u0000\u025b\u025c\u0003>"+ + "\u001f\u0000\u025cy\u0001\u0000\u0000\u0000\u025d\u025e\u0005\u0011\u0000"+ + "\u0000\u025e\u0261\u00036\u001b\u0000\u025f\u0260\u0005!\u0000\u0000\u0260"+ + "\u0262\u0003\u001e\u000f\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0261"+ + "\u0262\u0001\u0000\u0000\u0000\u0262{\u0001\u0000\u0000\u0000\u0263\u0265"+ + "\u0007\b\u0000\u0000\u0264\u0263\u0001\u0000\u0000\u0000\u0264\u0265\u0001"+ + "\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0005"+ + "\u0014\u0000\u0000\u0267\u0268\u0003~?\u0000\u0268\u0269\u0003\u0080@"+ + "\u0000\u0269}\u0001\u0000\u0000\u0000\u026a\u026d\u0003@ \u0000\u026b"+ + "\u026c\u0005Y\u0000\u0000\u026c\u026e\u0003@ \u0000\u026d\u026b\u0001"+ + "\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e\u007f\u0001"+ + "\u0000\u0000\u0000\u026f\u0270\u0005]\u0000\u0000\u0270\u0275\u0003\u0082"+ + "A\u0000\u0271\u0272\u0005\'\u0000\u0000\u0272\u0274\u0003\u0082A\u0000"+ + "\u0273\u0271\u0001\u0000\u0000\u0000\u0274\u0277\u0001\u0000\u0000\u0000"+ + "\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000"+ + "\u0276\u0081\u0001\u0000\u0000\u0000\u0277\u0275\u0001\u0000\u0000\u0000"+ + "\u0278\u0279\u0003\u0010\b\u0000\u0279\u0083\u0001\u0000\u0000\u0000="+ + "\u008f\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9"+ + "\u00f4\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148"+ + "\u014d\u0157\u015f\u016c\u0170\u0174\u017b\u017f\u0186\u018c\u0193\u019b"+ + "\u01a3\u01ab\u01bc\u01c7\u01d2\u01d7\u01db\u01e0\u01eb\u01f0\u01f4\u0202"+ + "\u020d\u021b\u0226\u0229\u022e\u0244\u024c\u024f\u0254\u0261\u0264\u026d"+ + "\u0275"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java index 0705ae7f778cd..484a655fc2988 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ParsingException.java @@ -18,7 +18,7 @@ public class ParsingException 
extends EsqlClientException { public ParsingException(String message, Exception cause, int line, int charPositionInLine) { super(message, cause); this.line = line; - this.charPositionInLine = charPositionInLine; + this.charPositionInLine = charPositionInLine + 1; } ParsingException(String message, Object... args) { @@ -42,7 +42,7 @@ public int getLineNumber() { } public int getColumnNumber() { - return charPositionInLine + 1; + return charPositionInLine; } public String getErrorMessage() { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 4574c3720f8ee..5ae3702993fcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -91,7 +91,7 @@ public List rightFields() { public Set addedFields() { if (lazyAddedFields == null) { - lazyAddedFields = outputSet(); + lazyAddedFields = new AttributeSet(output()); lazyAddedFields.removeAll(left().output()); } return lazyAddedFields; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 4c6a5f5426cfd..1580b77931240 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -88,7 +88,6 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.IsNotNulls(), new ExpressionTranslators.Nots(), new ExpressionTranslators.Likes(), - new ExpressionTranslators.StringQueries(), new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), @@ -539,7 +538,7 @@ protected Query asQuery(Match match, TranslatorHandler handler) { public static class QueryStringFunctionTranslator extends ExpressionTranslator { @Override protected Query asQuery(QueryString queryString, TranslatorHandler handler) { - return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), null); + return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), Map.of()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index c181f434368e0..1096c917fed4f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -362,11 +362,10 @@ private PhysicalOperation planTopN(TopNExec topNExec, LocalExecutionPlannerConte case VERSION -> TopNEncoder.VERSION; case BOOLEAN, NULL, BYTE, SHORT, INTEGER, LONG, DOUBLE, FLOAT, HALF_FLOAT, DATETIME, DATE_NANOS, DATE_PERIOD, TIME_DURATION, OBJECT, SCALED_FLOAT, UNSIGNED_LONG, DOC_DATA_TYPE, TSID_DATA_TYPE -> TopNEncoder.DEFAULT_SORTABLE; - case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE -> + case GEO_POINT, CARTESIAN_POINT, GEO_SHAPE, CARTESIAN_SHAPE, COUNTER_LONG, COUNTER_INTEGER, COUNTER_DOUBLE, SOURCE -> TopNEncoder.DEFAULT_UNSORTABLE; // unsupported 
fields are encoded as BytesRef, we'll use the same encoder; all values should be null at this point case PARTIAL_AGG, UNSUPPORTED -> TopNEncoder.UNSUPPORTED; - case SOURCE -> throw new EsqlIllegalArgumentException("No TopN sorting encoder for type " + inverse.get(channel).type()); }; } List orders = topNExec.order().stream().map(order -> { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 266f07d22eaf5..a347a6947bf67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -16,7 +15,6 @@ import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Collections; -import java.util.Map; import java.util.Set; /** @@ -48,34 +46,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); - /** - * When we added the warnings for multivalued fields emitting {@code null} - * when they touched multivalued fields. Added in #102417. - */ - private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn"); - - /** - * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177. - */ - private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points"); - /** * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. */ private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source"); - /** - * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}. - * Added in ESQL-1183. - */ - private static final NodeFeature CONVERT_WARN = new NodeFeature("esql.convert_warn"); - - /** - * When we flipped the return type of {@code POW} to always return a double. Changed - * in #102183. - */ - private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double"); - /** * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. */ @@ -152,12 +127,6 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); - /** - * Support for loading values over enrich. This is supported by all versions of ESQL but not - * the unit test CsvTests. 
- */ - public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); - /** * Support for timespan units abbreviations */ @@ -215,16 +184,4 @@ public Set getFeatures() { return features; } } - - @Override - public Map getHistoricalFeatures() { - return Map.ofEntries( - Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), - Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(SPATIAL_POINTS, Version.V_8_12_0), - Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0), - Map.entry(ENRICH_LOAD, Version.V_8_12_0) - ); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java index 985dcf118ac54..4067fc5a4e065 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,8 +33,6 @@ public class TransportEsqlStatsAction extends TransportNodesAction< EsqlStatsResponse.NodeStatsResponse, Void> { - static final NodeFeature ESQL_STATS_FEATURE = new NodeFeature("esql.stats_node"); - // the plan executor holds the metrics private final FeatureService featureService; private final PlanExecutor planExecutor; @@ -63,13 +60,7 @@ public TransportEsqlStatsAction( @Override protected DiscoveryNode[] resolveRequest(EsqlStatsRequest request, ClusterState clusterState) { - if (featureService.clusterHasFeature(clusterState, ESQL_STATS_FEATURE)) { - // use the whole cluster - return super.resolveRequest(request, clusterState); - } else { - // not all nodes in the cluster have upgraded to esql - just use this node for now - return new DiscoveryNode[] { clusterService.localNode() }; - } + return super.resolveRequest(request, clusterState); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 11590cb877db7..010a60ef7da15 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -236,7 +236,10 @@ public final void test() throws Throwable { * are tested in integration tests. 
*/ assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); - assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); + assumeFalse( + "enrich can't load fields in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName()) + ); assumeFalse( "can't use match in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.capabilityName()) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index a389923afee79..2770ed1f336ae 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -2317,8 +2317,6 @@ public void testInvalidNamedParamsForIdentifierPatterns() { } public void testFromEnrichAndMatchColonUsage() { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - LogicalPlan plan = analyze(""" from *:test | EVAL x = to_string(languages) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index c1604bb5f5d06..f25b19c4e5d1c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -404,6 +404,11 @@ public void testAggFilterOnBucketingOrAggFunctions() { query("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by bucket(salary, 10)"); // but fails if it's different + assertEquals( + "1:32: can only use grouping function [bucket(a, 3)] part of the BY clause", + error("row a = 1 | stats sum(a) where bucket(a, 3) > -1 by bucket(a,2)") + ); + assertEquals( "1:40: can only use grouping function [bucket(salary, 10)] part of the BY clause", error("from test | stats max(languages) WHERE bucket(salary, 10) > 1 by emp_no") @@ -771,40 +776,40 @@ public void testWrongInputParam() { public void testPeriodAndDurationInRowAssignment() { for (var unit : TIME_DURATIONS) { - assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::time_duration] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::time_duration] directly in a row assignment", error("row a = 1 " + unit + "::time_duration") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::time_duration] directly in a row assignment", error("row a = \"1 " + unit + "\"::time_duration") ); assertEquals( - "1:5: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_timeduration(1 " + unit + ")] directly in a row assignment", error("row a = to_timeduration(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_timeduration(\"1 " + unit + "\")] directly in a row assignment", 
error("row a = to_timeduration(\"1 " + unit + "\")") ); } for (var unit : DATE_PERIODS) { - assertEquals("1:5: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); + assertEquals("1:9: cannot use [1 " + unit + "] directly in a row assignment", error("row a = 1 " + unit)); assertEquals( - "1:5: cannot use [1 " + unit + "::date_period] directly in a row assignment", + "1:9: cannot use [1 " + unit + "::date_period] directly in a row assignment", error("row a = 1 " + unit + "::date_period") ); assertEquals( - "1:5: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", + "1:9: cannot use [\"1 " + unit + "\"::date_period] directly in a row assignment", error("row a = \"1 " + unit + "\"::date_period") ); assertEquals( - "1:5: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(1 " + unit + ")] directly in a row assignment", error("row a = to_dateperiod(1 " + unit + ")") ); assertEquals( - "1:5: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", + "1:9: cannot use [to_dateperiod(\"1 " + unit + "\")] directly in a row assignment", error("row a = to_dateperiod(\"1 " + unit + "\")") ); } @@ -1159,8 +1164,6 @@ public void testMatchInsideEval() throws Exception { } public void testMatchFilter() throws Exception { - assumeTrue("Match operator is available just for snapshots", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - assertEquals( "1:19: first argument of [salary:\"100\"] must be [string], found value [salary] type [integer]", error("from test | where salary:\"100\"") @@ -1190,7 +1193,6 @@ public void testMatchFunctionNotAllowedAfterCommands() throws Exception { } public void testMatchFunctionAndOperatorHaveCorrectErrorMessages() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:24: [MATCH] function cannot be used after LIMIT", error("from test | limit 10 | where match(first_name, \"Anna\")") @@ -1334,7 +1336,6 @@ public void testMatchFunctionOnlyAllowedInWhere() throws Exception { } public void testMatchOperatornOnlyAllowedInWhere() throws Exception { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsOnlyAllowedInWhere(":", "first_name:\"Anna\"", "operator"); } @@ -1399,8 +1400,6 @@ public void testMatchFunctionWithDisjunctions() { } public void testMatchOperatorWithDisjunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - checkWithDisjunctions(":", "first_name : \"Anna\"", "operator"); } @@ -1463,7 +1462,6 @@ public void testMatchFunctionWithNonBooleanFunctions() { } public void testMatchOperatorWithNonBooleanFunctions() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); checkFullTextFunctionsWithNonBooleanFunctions(":", "first_name:\"Anna\"", "operator"); } @@ -1541,8 +1539,6 @@ public void testMatchFunctionCurrentlyUnsupportedBehaviour() throws Exception { "1:68: Unknown column [first_name]", error("from test | stats max_salary = max(salary) by emp_no | where match(first_name, \"Anna\")") ); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals( "1:62: Unknown column [first_name]", error("from test | stats max_salary = 
max(salary) by emp_no | where first_name : \"Anna\"") @@ -1562,8 +1558,6 @@ public void testMatchFunctionNullArgs() throws Exception { public void testMatchTargetsExistingField() throws Exception { assertEquals("1:39: Unknown column [first_name]", error("from test | keep emp_no | where match(first_name, \"Anna\")")); - - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); assertEquals("1:33: Unknown column [first_name]", error("from test | keep emp_no | where first_name : \"Anna\"")); } @@ -1827,6 +1821,8 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1854,6 +1850,8 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1867,6 +1865,8 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); assertEquals( @@ -1888,6 +1888,13 @@ public void testCategorizeWithinAggregations() { ); } + public void testSortByAggregate() { + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT count(*)")); + assertEquals("1:28: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT to_string(count(*))")); + assertEquals("1:22: Aggregate functions are not allowed in SORT [MAX]", error("ROW a = 1 | SORT 1 + max(a)")); + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("FROM test | SORT count(*)")); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 181b8d52bf888..7802d74d2264f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -44,6 +44,7 @@ import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; +import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; @@ -130,7 +131,9 @@ public abstract class AbstractFunctionTestCase extends ESTestCase { entry("mod", Mod.class), entry("neg", Neg.class), entry("is_null", IsNull.class), - entry("is_not_null", IsNotNull.class) + entry("is_not_null", IsNotNull.class), + // Match operator is both a function and an operator + entry("match_operator", Match.class) ); 
private static EsqlFunctionRegistry functionRegistry = new EsqlFunctionRegistry().snapshotRegistry(); @@ -813,6 +816,10 @@ private static String buildSignatureSvg(String name) throws IOException { if (unaryOperator != null) { return RailRoadDiagram.unaryOperator(unaryOperator); } + String searchOperator = searchOperator(name); + if (searchOperator != null) { + return RailRoadDiagram.searchOperator(searchOperator); + } FunctionDefinition definition = definition(name); if (definition != null) { return RailRoadDiagram.functionSignature(definition); @@ -862,7 +869,7 @@ public static void renderDocs() throws IOException { return; } String name = functionName(); - if (binaryOperator(name) != null || unaryOperator(name) != null || likeOrInOperator(name)) { + if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) { renderDocsForOperators(name); return; } @@ -1258,6 +1265,16 @@ private static String binaryOperator(String name) { }; } + /** + * If this test is for a search operator return its symbol, otherwise return {@code null}. + */ + private static String searchOperator(String name) { + return switch (name) { + case "match_operator" -> ":"; + default -> null; + }; + } + /** * If this tests is for a unary operator return its symbol, otherwise return {@code null}. * This is functionally the reverse of {@link ExpressionBuilder#visitArithmeticUnary}. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java index df0737feadd8d..43e2ededeff0e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/RailRoadDiagram.java @@ -89,6 +89,18 @@ static String binaryOperator(String operator) throws IOException { return toSvg(new Sequence(expressions.toArray(Expression[]::new))); } + /** + * Generate a railroad diagram for a search operator. The output would look like + * {@code field : query}. + */ + static String searchOperator(String operator) throws IOException { + List expressions = new ArrayList<>(); + expressions.add(new Literal("field")); + expressions.add(new Syntax(operator)); + expressions.add(new Literal("query")); + return toSvg(new Sequence(expressions.toArray(Expression[]::new))); + } + /** * Generate a railroad diagram for unary operator. The output would look like * {@code -v}. diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java new file mode 100644 index 0000000000000..32e9670286ef7 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchOperatorTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionName; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; + +import java.util.LinkedList; +import java.util.List; +import java.util.function.Supplier; + +/** + * This class is only used to generate docs for the match operator - all testing is done in {@link MatchTests} + */ +@FunctionName("match_operator") +public class MatchOperatorTests extends MatchTests { + + public MatchOperatorTests(@Name("TestCase") Supplier testCaseSupplier) { + super(testCaseSupplier); + } + + @ParametersFactory + public static Iterable parameters() { + // Have a minimal test so that we can generate the appropriate types in the docs + List suppliers = new LinkedList<>(); + addPositiveTestCase(List.of(DataType.KEYWORD, DataType.KEYWORD), suppliers); + addPositiveTestCase(List.of(DataType.TEXT, DataType.TEXT), suppliers); + addPositiveTestCase(List.of(DataType.KEYWORD, DataType.TEXT), suppliers); + addPositiveTestCase(List.of(DataType.TEXT, DataType.KEYWORD), suppliers); + return parameterSuppliersFromTypedData(suppliers); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index 6d0c45a972299..6a4a7404135f9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -36,19 +36,11 @@ public MatchTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { - Set supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); - Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); - Set supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); - List> supportedPerPosition = List.of( - supportedTextParams, - supportedTextParams, - supportedNumericParams, - supportedFuzzinessParams - ); + List> supportedPerPosition = supportedParams(); List suppliers = new LinkedList<>(); for (DataType fieldType : DataType.stringTypes()) { for (DataType queryType : DataType.stringTypes()) { - addPositiveTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); + addPositiveTestCase(List.of(fieldType, queryType), suppliers); addNonFieldTestCase(List.of(fieldType, queryType), supportedPerPosition, suppliers); } } @@ -61,11 +53,20 @@ public static Iterable parameters() { ); } - private static void addPositiveTestCase( - List paramDataTypes, - List> supportedPerPosition, - List suppliers - ) { + protected static List> supportedParams() { + Set supportedTextParams = Set.of(DataType.KEYWORD, DataType.TEXT); + Set supportedNumericParams = Set.of(DataType.DOUBLE, DataType.INTEGER); + Set supportedFuzzinessParams = Set.of(DataType.INTEGER, DataType.KEYWORD, DataType.TEXT); + List> supportedPerPosition = List.of( + supportedTextParams, + supportedTextParams, + supportedNumericParams, + supportedFuzzinessParams + ); + return supportedPerPosition; + } + + protected static void addPositiveTestCase(List paramDataTypes, List suppliers) { //
Positive case - creates an ES field from the field parameter type suppliers.add( diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java deleted file mode 100644 index ff00a161e1bb1..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.expression.predicate.operator.fulltext; - -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; - -import java.io.IOException; - -public class StringQuerySerializationTests extends AbstractFulltextSerializationTests { - - private static final String COMMA = ","; - - @Override - protected final StringQueryPredicate createTestInstance() { - return new StringQueryPredicate(randomSource(), randomAlphaOfLength(randomIntBetween(1, 16)), randomOptionOrNull()); - } - - @Override - protected StringQueryPredicate mutateInstance(StringQueryPredicate instance) throws IOException { - var query = instance.query(); - var options = instance.options(); - if (randomBoolean()) { - query = randomValueOtherThan(query, () -> randomAlphaOfLength(randomIntBetween(1, 16))); - } else { - options = randomValueOtherThan(options, this::randomOptionOrNull); - } - return new StringQueryPredicate(instance.source(), query, options); - } -} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 0d0d78f81ffef..5c824471f369e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.core.enrich.EnrichPolicy; import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestSearchStats; -import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.EnrichResolution; @@ -1270,8 +1269,6 @@ public void testMissingFieldsDoNotGetExtracted() { * estimatedRowSize[324] */ public void testSingleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - var plan = plannerOptimizer.plan(""" from test | where first_name:"Anna" @@ -1302,8 +1299,6 @@ public void testSingleMatchFilterPushdown() { * [_doc{f}#22], limit[1000], sort[[FieldSort[field=emp_no{f}#12, direction=ASC, nulls=LAST]]] estimatedRowSize[336] */ public void testMultipleMatchFilterPushdown() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); - String query = """ from test | where first_name:"Anna" 
and first_name:"Anneke" diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 2a55379db69d1..96951ee15d48b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -2563,7 +2563,7 @@ public void testSimplifyRLikeMatchAll() { public void testRLikeWrongPattern() { String query = "from test | where first_name rlike \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid regex pattern for RLIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[invalid range: from (95) cannot be > to (93)]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); @@ -2571,7 +2571,7 @@ public void testRLikeWrongPattern() { public void testLikeWrongPattern() { String query = "from test | where first_name like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\""; - String error = "line 1:20: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + String error = "line 1:19: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special wildcard char]"; ParsingException e = expectThrows(ParsingException.class, () -> plan(query)); assertThat(e.getMessage(), is(error)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 0177747d27243..710637c05a900 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -134,7 +134,7 @@ public void testStringLiteralsExceptions() { ); var number = "1" + IntStream.range(0, 309).mapToObj(ignored -> "0").collect(Collectors.joining()); - assertParsingException(() -> parse("row foo == " + number), "line 1:13: Number [" + number + "] is too large"); + assertParsingException(() -> parse("row foo == " + number), "line 1:12: Number [" + number + "] is too large"); } public void testBooleanLiteralsCondition() { @@ -442,20 +442,20 @@ public void testOverflowingValueForDuration() { for (String unit : List.of("milliseconds", "seconds", "minutes", "hours")) { assertParsingException( () -> parse("row x = 9223372036854775808 " + unit), // unsigned_long (Long.MAX_VALUE + 1) - "line 1:10: Number [9223372036854775808] outside of [" + unit + "] range" + "line 1:9: Number [9223372036854775808] outside of [" + unit + "] range" ); assertParsingException( () -> parse("row x = 18446744073709551616 " + unit), // double (UNSIGNED_LONG_MAX + 1) - "line 1:10: Number [18446744073709551616] outside of [" + unit + "] range" + "line 1:9: Number [18446744073709551616] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 153722867280912931 minutes"), // Long.MAX_VALUE / 60 + 1 - "line 1:10: Number [153722867280912931] outside of [minutes] range" + "line 1:9: Number [153722867280912931] outside of [minutes] range" ); assertParsingException( () -> parse("row x = 2562047788015216 hours"), // Long.MAX_VALUE / 3600 + 1 - "line 
1:10: Number [2562047788015216] outside of [hours] range" + "line 1:9: Number [2562047788015216] outside of [hours] range" ); } @@ -463,12 +463,12 @@ public void testOverflowingValueForPeriod() { for (String unit : List.of("days", "weeks", "months", "years")) { assertParsingException( () -> parse("row x = 2147483648 " + unit), // long (Integer.MAX_VALUE + 1) - "line 1:10: Number [2147483648] outside of [" + unit + "] range" + "line 1:9: Number [2147483648] outside of [" + unit + "] range" ); } assertParsingException( () -> parse("row x = 306783379 weeks"), // Integer.MAX_VALUE / 7 + 1 - "line 1:10: Number [306783379] outside of [weeks] range" + "line 1:9: Number [306783379] outside of [weeks] range" ); } @@ -544,7 +544,7 @@ public void testWildcardProjectAwayPatterns() { } public void testForbidWildcardProjectAway() { - assertParsingException(() -> dropExpression("foo, *"), "line 1:21: Removing all fields is not allowed [*]"); + assertParsingException(() -> dropExpression("foo, *"), "line 1:20: Removing all fields is not allowed [*]"); } public void testForbidMultipleIncludeStar() { @@ -608,7 +608,7 @@ public void testMultipleProjectPatterns() { } public void testForbidWildcardProjectRename() { - assertParsingException(() -> renameExpression("b* AS a*"), "line 1:18: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); + assertParsingException(() -> renameExpression("b* AS a*"), "line 1:17: Using wildcards [*] in RENAME is not allowed [b* AS a*]"); } public void testSimplifyInWithSingleElementList() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 4c1a9228b26e2..69c00eb395fdb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -525,10 +525,10 @@ private void clusterAndIndexAsLookupIndexPattern(String clusterAndIndex) { public void testInvalidCharacterInIndexPattern() { Map commands = new HashMap<>(); - commands.put("FROM {}", "line 1:7: "); + commands.put("FROM {}", "line 1:6: "); if (Build.current().isSnapshot()) { - commands.put("METRICS {}", "line 1:10: "); - commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:23: "); + commands.put("METRICS {}", "line 1:9: "); + commands.put("ROW x = 1 | LOOKUP_🐔 {} ON j", "line 1:22: "); } String lineNumber; for (String command : commands.keySet()) { @@ -572,7 +572,7 @@ public void testInvalidCharacterInIndexPattern() { continue; } - lineNumber = command.contains("FROM") ? "line 1:21: " : "line 1:24: "; + lineNumber = command.contains("FROM") ? "line 1:20: " : "line 1:23: "; expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, --indexpattern", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "indexpattern, \"--indexpattern\"", lineNumber, "-indexpattern"); expectInvalidIndexNameErrorWithLineNumber(command, "\"indexpattern, --indexpattern\"", commands.get(command), "-indexpattern"); @@ -585,7 +585,7 @@ public void testInvalidCharacterInIndexPattern() { if (command.contains("LOOKUP_🐔")) { continue; } - lineNumber = command.contains("FROM") ? "line 1:10: " : "line 1:13: "; + lineNumber = command.contains("FROM") ? 
"line 1:9: " : "line 1:12: "; clustersAndIndices(command, "*", "-index#pattern"); clustersAndIndices(command, "index*", "-index#pattern"); clustersAndIndices(command, "*", "-<--logstash-{now/M{yyyy.MM}}>"); @@ -885,18 +885,18 @@ public void testSuggestAvailableProcessingCommandsOnParsingError() { public void testDeprecatedIsNullFunction() { expectError( "from test | eval x = is_null(f)", - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); expectError( "row x = is_null(f)", - "line 1:10: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:9: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); if (Build.current().isSnapshot()) { expectError( "from test | eval x = ?fn1(f)", List.of(paramAsIdentifier("fn1", "IS_NULL")), - "line 1:23: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" + "line 1:22: is_null function is not supported anymore, please use 'is null'/'is not null' predicates instead" ); } } @@ -911,23 +911,23 @@ public void testMetadataFieldOnOtherSources() { } public void testMetadataFieldMultipleDeclarations() { - expectError("from test metadata _index, _version, _index", "1:39: metadata field [_index] already declared [@1:20]"); + expectError("from test metadata _index, _version, _index", "1:38: metadata field [_index] already declared [@1:20]"); } public void testMetadataFieldUnsupportedPrimitiveType() { - expectError("from test metadata _tier", "line 1:21: unsupported metadata field [_tier]"); + expectError("from test metadata _tier", "line 1:20: unsupported metadata field [_tier]"); } public void testMetadataFieldUnsupportedCustomType() { - expectError("from test metadata _feature", "line 1:21: unsupported metadata field [_feature]"); + expectError("from test metadata _feature", "line 1:20: unsupported metadata field [_feature]"); } public void testMetadataFieldNotFoundNonExistent() { - expectError("from test metadata _doesnot_compute", "line 1:21: unsupported metadata field [_doesnot_compute]"); + expectError("from test metadata _doesnot_compute", "line 1:20: unsupported metadata field [_doesnot_compute]"); } public void testMetadataFieldNotFoundNormalField() { - expectError("from test metadata emp_no", "line 1:21: unsupported metadata field [emp_no]"); + expectError("from test metadata emp_no", "line 1:20: unsupported metadata field [emp_no]"); } public void testDissectPattern() { @@ -985,13 +985,13 @@ public void testGrokPattern() { expectError( "row a = \"foo bar\" | GROK a \"%{NUMBER:foo} %{WORD:foo}\"", - "line 1:22: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + "line 1:21: Invalid GROK pattern [%{NUMBER:foo} %{WORD:foo}]:" + " the attribute [foo] is defined multiple times with different types" ); expectError( "row a = \"foo\" | GROK a \"(?P.+)\"", - "line 1:18: Invalid grok pattern [(?P.+)]: [undefined group option]" + "line 1:17: Invalid grok pattern [(?P.+)]: [undefined group option]" ); } @@ -1015,7 +1015,7 @@ public void testLikeRLike() { expectError( "from a | where foo like \"(?i)(^|[^a-zA-Z0-9_-])nmap($|\\\\.)\"", - "line 1:17: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "line 1:16: Invalid pattern for LIKE [(?i)(^|[^a-zA-Z0-9_-])nmap($|\\.)]: " + "[Invalid sequence - escape character is not followed by special 
wildcard char]" ); } @@ -1076,7 +1076,7 @@ public void testEnrich() { ); expectError( "from a | enrich typo:countries on foo", - "line 1:18: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" + "line 1:17: Unrecognized value [typo], ENRICH policy qualifier needs to be one of [_ANY, _COORDINATOR, _REMOTE]" ); } @@ -1261,8 +1261,8 @@ public void testInvalidPositionalParams() { expectError( "from test | where x < ?0 and y < ?2", List.of(paramAsConstant(null, 5)), - "line 1:24: No parameter is defined for position 0, did you mean position 1?; " - + "line 1:35: No parameter is defined for position 2, did you mean position 1?" + "line 1:23: No parameter is defined for position 0, did you mean position 1?; " + + "line 1:34: No parameter is defined for position 2, did you mean position 1?" ); expectError( @@ -2107,11 +2107,11 @@ public void testEnrichOnMatchField() { } public void testInlineConvertWithNonexistentType() { - expectError("ROW 1::doesnotexist", "line 1:9: Unknown data type named [doesnotexist]"); - expectError("ROW \"1\"::doesnotexist", "line 1:11: Unknown data type named [doesnotexist]"); - expectError("ROW false::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); - expectError("ROW abs(1)::doesnotexist", "line 1:14: Unknown data type named [doesnotexist]"); - expectError("ROW (1+2)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW 1::doesnotexist", "line 1:8: Unknown data type named [doesnotexist]"); + expectError("ROW \"1\"::doesnotexist", "line 1:10: Unknown data type named [doesnotexist]"); + expectError("ROW false::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); + expectError("ROW abs(1)::doesnotexist", "line 1:13: Unknown data type named [doesnotexist]"); + expectError("ROW (1+2)::doesnotexist", "line 1:12: Unknown data type named [doesnotexist]"); } public void testLookup() { @@ -2131,7 +2131,7 @@ public void testLookup() { } public void testInlineConvertUnsupportedType() { - expectError("ROW 3::BYTE", "line 1:6: Unsupported conversion to type [BYTE]"); + expectError("ROW 3::BYTE", "line 1:5: Unsupported conversion to type [BYTE]"); } public void testMetricsWithoutStats() { @@ -2300,7 +2300,6 @@ public void testMetricWithGroupKeyAsAgg() { } public void testMatchOperatorConstantQueryString() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); var plan = statement("FROM test | WHERE field:\"value\""); var filter = as(plan, Filter.class); var match = (Match) filter.condition(); @@ -2310,7 +2309,6 @@ public void testMatchOperatorConstantQueryString() { } public void testInvalidMatchOperator() { - assumeTrue("skipping because MATCH operator is not enabled", EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.isEnabled()); expectError("from test | WHERE field:", "line 1:25: mismatched input '' expecting {QUOTED_STRING, "); expectError( "from test | WHERE field:CONCAT(\"hello\", \"world\")", diff --git a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml index e74283bc873e3..ab150e41f310a 100644 --- a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml +++ b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml @@ -3,6 +3,12 @@ fleet.post_secret: body: '{"value": 
"test secret"}' - set: { id: id } + # search node needs to be available for fleet.get_secret to work in stateless. + # The `.fleet-secrets` index is created on demand, and its search replica starts out unassigned, + # so wait_for_no_uninitialized_shards can miss it. + - do: + cluster.health: + wait_for_active_shards: all - do: fleet.get_secret: id: $id diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 78e064b42bbb2..f5773e73f2b22 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -134,7 +134,8 @@ public void testApisWithoutTaskType() throws IOException { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(18)); } else { assertThat(services.size(), equalTo(17)); @@ -169,7 +170,8 @@ public void testGetServicesWithoutTaskType() throws IOException { "watsonxai" ) ); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(6, "elastic"); } assertArrayEquals(providers, providerList.toArray()); @@ -257,7 +259,8 @@ public void testGetServicesWithCompletionTaskType() throws IOException { public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(5)); } else { assertThat(services.size(), equalTo(4)); @@ -272,7 +275,8 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { Arrays.sort(providers); var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(1, "elastic"); } assertArrayEquals(providers, providerList.toArray()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 62405a2e9f7de..48458bf4f5086 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java 
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -91,7 +93,6 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; @@ -113,6 +114,9 @@ import java.util.stream.Stream; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin, SearchPlugin { @@ -135,11 +139,13 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; + private static final Logger log = LogManager.getLogger(InferencePlugin.class); + private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); - private final SetOnce eisComponents = new SetOnce<>(); + private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; @@ -207,12 +213,35 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - ElasticInferenceServiceSettings eisSettings = new ElasticInferenceServiceSettings(settings); - eisComponents.set(new ElasticInferenceServiceComponents(eisSettings.getEisGatewayUrl())); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". 
+ ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } + + if (elasticInferenceUrl != null) { + elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( - () -> List.of(context -> new ElasticInferenceService(httpFactory.get(), serviceComponents.get(), eisComponents.get())) + () -> List.of( + context -> new ElasticInferenceService( + httpFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponents.get() + ) + ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java index c8ada6e535b63..fa096901ed67a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java @@ -15,9 +15,11 @@ import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModel; import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceActionCreator implements ElasticInferenceServiceActionVisitor { @@ -36,7 +38,10 @@ public ElasticInferenceServiceActionCreator(Sender sender, ServiceComponents ser @Override public ExecutableAction create(ElasticInferenceServiceSparseEmbeddingsModel model) { var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext); - var errorMessage = constructFailedToSendRequestMessage(model.uri(), "Elastic Inference Service sparse embeddings"); + var errorMessage = constructFailedToSendRequestMessage( + model.uri(), + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER) + ); return new SenderExecutableAction(sender, requestManager, errorMessage); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java index e7ee41525f07d..bf3409888aaf8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java @@ -22,9 +22,11 @@ import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends ElasticInferenceServiceRequestManager { @@ -40,7 +42,7 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends Elast private static ResponseHandler createSparseEmbeddingsHandler() { return new ElasticInferenceServiceResponseHandler( - "Elastic Inference Service sparse embeddings", + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), ElasticInferenceServiceSparseEmbeddingsResponseEntity::fromResponse ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java index 2b36cc5d22cd4..42ca45f75a9c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java @@ -33,7 +33,7 @@ public class ElasticInferenceServiceSparseEmbeddingsResponseEntity { "Failed to find required field [%s] in Elastic Inference Service embeddings response"; /** - * Parses the EIS json response. + * Parses the Elastic Inference Service json response. 
* * For a request like: * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 98429ed3d001d..e7ce5903163d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -62,6 +62,7 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; + public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; @@ -70,10 +71,10 @@ public class ElasticInferenceService extends SenderService { public ElasticInferenceService( HttpRequestSender.Factory factory, ServiceComponents serviceComponents, - ElasticInferenceServiceComponents eisComponents + ElasticInferenceServiceComponents elasticInferenceServiceComponents ) { super(factory, serviceComponents); - this.elasticInferenceServiceComponents = eisComponents; + this.elasticInferenceServiceComponents = elasticInferenceServiceComponents; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java index 4386964e927d2..c5b2cb693df13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java @@ -7,4 +7,4 @@ package org.elasticsearch.xpack.inference.services.elastic; -public record ElasticInferenceServiceComponents(String eisGatewayUrl) {} +public record ElasticInferenceServiceComponents(String elasticInferenceServiceUrl) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index b0fb6d14ee6f7..324c20d0e48bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -15,6 +15,8 @@ */ public class ElasticInferenceServiceFeature { - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + @Deprecated + public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 8525710c6cf23..bc2daddc2a346 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -14,20 +14,37 @@ public class ElasticInferenceServiceSettings { + @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( + "xpack.inference.elastic.url", + Setting.Property.NodeScope + ); + // Adjust this variable to be volatile, if the setting can be updated at some point in time + @Deprecated private final String eisGatewayUrl; + private final String elasticInferenceServiceUrl; + public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); + elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); + } public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL); + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } + @Deprecated public String getEisGatewayUrl() { return eisGatewayUrl; } + + public String getElasticInferenceServiceUrl() { + return elasticInferenceServiceUrl; + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index bbbae736dbeb9..cc69df86933de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -22,8 +22,11 @@ import java.net.URI; import java.net.URISyntaxException; +import java.util.Locale; import java.util.Map; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; + public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceModel { private final URI uri; @@ -105,9 +108,13 @@ private URI createUri() throws URISyntaxException { switch (modelId) { case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2"; - default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); + default -> throw new IllegalArgumentException( + String.format(Locale.ROOT, "Unsupported model for %s [%s]", ELASTIC_INFERENCE_SERVICE_IDENTIFIER, modelId) + ); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); + return new URI( + elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath + ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index e960b0b777f2b..f4f4605c667c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -16,6 +16,7 @@ import 
org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -30,6 +31,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -86,11 +88,19 @@ public void parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + IbmWatsonxModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -112,6 +122,7 @@ private static IbmWatsonxModel createModel( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -123,6 +134,7 @@ private static IbmWatsonxModel createModel( NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -141,11 +153,17 @@ public IbmWatsonxModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -166,6 +184,7 @@ private static IbmWatsonxModel createModelFromPersistent( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secretSettings, String failureMessage ) { @@ -174,6 +193,7 @@ private static IbmWatsonxModel createModelFromPersistent( taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -185,11 +205,17 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if 
(TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -266,7 +292,8 @@ protected void doChunkedInfer( var batchedRequests = new EmbeddingRequestChunker( input.getInputs(), EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT + EmbeddingRequestChunker.EmbeddingType.FLOAT, + model.getConfigurations().getChunkingSettings() ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = ibmWatsonxModel.accept(getActionCreator(getSender(), getServiceComponents()), taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java index d60e31b5d41c0..6b20e07ecc0a2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java @@ -9,6 +9,7 @@ import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; @@ -40,6 +41,7 @@ public IbmWatsonxEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secrets, ConfigurationParseContext context ) { @@ -49,6 +51,7 @@ public IbmWatsonxEmbeddingsModel( service, IbmWatsonxEmbeddingsServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, + chunkingSettings, DefaultSecretSettings.fromMap(secrets) ); } @@ -64,10 +67,11 @@ public IbmWatsonxEmbeddingsModel(IbmWatsonxEmbeddingsModel model, IbmWatsonxEmbe String service, IbmWatsonxEmbeddingsServiceSettings serviceSettings, TaskSettings taskSettings, + ChunkingSettings chunkingsettings, @Nullable DefaultSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingsettings), new ModelSecrets(secrets), serviceSettings ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index d6c491f2b7cec..f7f37c5bcd15f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import 
org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -69,6 +70,8 @@ import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -124,6 +127,7 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); service.parseRequestConfig( @@ -150,6 +154,45 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createIbmWatsonxService()) { var failureListener = getModelListenerForException( @@ -235,6 +278,47 @@ public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsMode assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), 
instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); } } @@ -399,6 +483,73 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists } } + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + 
assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotIbmWatsonxModel() throws IOException { var sender = mock(Sender.class); @@ -488,7 +639,15 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { } } - public void testChunkedInfer_Batches() throws IOException { + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { + testChunkedInfer_Batches(null); + } + + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { + testChunkedInfer_Batches(createRandomChunkingSettings()); + } + + private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws IOException { var input = List.of("foo", "bar"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -878,6 +1037,18 @@ private static ActionListener getModelListenerForException(Class excep }); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java index 93fd7e402a0de..33fcd752fbf30 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java @@ -82,6 +82,7 @@ public static IbmWatsonxEmbeddingsModel createModel( null ), EmptyTaskSettings.INSTANCE, + null, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } diff --git a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java index fe406722ae1e2..8d8ad94d608d7 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java +++ b/x-pack/plugin/inference/src/yamlRestTest/java/org/elasticsearch/xpack/inference/InferenceRestIT.java @@ -36,7 +36,7 @@ protected Settings restClientSettings() { var baseSettings = super.restClientSettings(); return Settings.builder() .put(baseSettings) - .put(CLIENT_SOCKET_TIMEOUT, "120s") // Long timeout for model download + .put(CLIENT_SOCKET_TIMEOUT, "300s") // Long timeout for model download .build(); } diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java 
b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index 8d7a813b206d8..8930ff23fb3b0 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -271,7 +271,7 @@ public void testDateHistogramAggregation() throws IOException { } public void testEsqlSource() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -287,7 +287,7 @@ public void testEsqlSource() throws IOException { } public void testEsqlTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -302,7 +302,7 @@ public void testEsqlTermsAggregation() throws IOException { } public void testEsqlTermsAggregationByMethod() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(100, 200); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java index a9992e168bc66..2ea56b147bf9c 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return restChannel -> { - final String content = request.content().utf8ToString(); + final String content = request.releasableContent().utf8ToString(); client.execute( PutPipelineAction.INSTANCE, new PutPipelineRequest(id, content, request.getXContentType()), diff --git a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java index 216f82552353b..fa5d9428bb0c6 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-constant-keyword/src/main/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapper.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.constantkeyword.mapper; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery; @@ -58,7 +57,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.stream.Stream; /** * A {@link FieldMapper} that assigns every document the same value. 
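The next hunk in this mapper drops the hand-rolled synthetic field loader in favor of fallback synthetic source, so the constant value only appears in synthetic _source when the original document actually carried the field. The following JDK-only sketch is purely illustrative of that behavior (hypothetical class and method names, not the mapper's actual implementation): import java.util.Collections; import java.util.Map; public final class ConstantKeywordSyntheticSourceSketch { // mappedValue stands in for the value configured in the constant_keyword mapping; // documentHadField says whether the indexed document actually supplied the field. static Map<String, Object> syntheticSource(String fieldName, String mappedValue, boolean documentHadField) { if (documentHadField == false) { // No value in the document -> no value in synthetic source, matching stored // source and keeping reindex into a differently-configured index safe. return Collections.emptyMap(); } return Map.of(fieldName, mappedValue); } public static void main(String[] args) { System.out.println(syntheticSource("level", "info", false)); // {} System.out.println(syntheticSource("level", "info", true)); // {level=info} } }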
@@ -356,40 +354,14 @@ protected SyntheticSourceSupport syntheticSourceSupport() { return new SyntheticSourceSupport.Native(SourceLoader.SyntheticFieldLoader.NOTHING); } - var loader = new SourceLoader.SyntheticFieldLoader() { - @Override - public Stream> storedFieldLoaders() { - return Stream.of(); - } - - @Override - public DocValuesLoader docValuesLoader(LeafReader reader, int[] docIdsInLeaf) { - return docId -> true; - } - - @Override - public boolean hasValue() { - return true; - } - - @Override - public void write(XContentBuilder b) throws IOException { - if (fieldType().value != null) { - b.field(leafName(), fieldType().value); - } - } - - @Override - public void reset() { - // NOOP - } - - @Override - public String fieldName() { - return fullPath(); - } - }; + /* + If there was no value in the document, synthetic source should not have the value too. + This is consistent with stored source behavior and is important for scenarios + like reindexing into an index that has a different value of this value in the mapping. - return new SyntheticSourceSupport.Native(loader); + In order to do that we use fallback logic which implements exactly such logic (_source only contains value + if it was in the original document). + */ + return new SyntheticSourceSupport.Fallback(); } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java index 4661fe77e8b11..2b9170afdfd70 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-constant-keyword/src/test/java/org/elasticsearch/xpack/constantkeyword/mapper/ConstantKeywordFieldMapperTests.java @@ -333,6 +333,17 @@ public void testNullValueSyntheticSource() throws IOException { assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); } + public void testNoValueInDocumentSyntheticSource() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + b.field("type", "constant_keyword"); + b.field("value", randomAlphaOfLength(5)); + b.endObject(); + })).documentMapper(); + + assertThat(syntheticSource(mapper, b -> {}), equalTo("{}")); + } + @Override protected boolean supportsEmptyInputArray() { return false; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java index 5b1ed7c954fe9..e13b1e0033191 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregator.java @@ -121,21 +121,22 @@ public InternalAggregation[] buildAggregations(LongArray ordsToCollect) throws I continue; } int size = (int) Math.min(bucketOrds.bucketsInOrd(ordIdx), bucketCountThresholds.getShardSize()); + checkRealMemoryCBForInternalBucket(); topBucketsPerOrd.set(ordIdx, categorizer.toOrderedBuckets(size)); } buildSubAggsForAllBuckets(topBucketsPerOrd, Bucket::getBucketOrd, Bucket::setAggregations); - InternalAggregation[] results = new InternalAggregation[Math.toIntExact(ordsToCollect.size())]; - for (int ordIdx = 0; ordIdx < 
results.length; ordIdx++) { - results[ordIdx] = new InternalCategorizationAggregation( + + return buildAggregations( + Math.toIntExact(ordsToCollect.size()), + ordIdx -> new InternalCategorizationAggregation( name, bucketCountThresholds.getRequiredSize(), bucketCountThresholds.getMinDocCount(), similarityThreshold, metadata(), Arrays.asList(topBucketsPerOrd.get(ordIdx)) - ); - } - return results; + ) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 7ef7a8f4e6dd5..95b6a18182f9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -142,8 +142,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.DOC_COUNT.getPreferredName(), serializableCategory.getNumMatches()); builder.field(CommonFields.KEY.getPreferredName()); @@ -152,7 +151,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName(), serializableCategory.maxMatchingStringLen()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } BucketKey getRawKey() { @@ -280,7 +278,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java index c97166ac6fd80..39bdb69d4da40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java @@ -12,12 +12,13 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket { +public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket implements ToXContent { private final Object key; private final long docCount; private final InternalAggregations aggregations; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index 
bbe5bea691c35..5dd7dbbffaa61 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -194,7 +194,7 @@ Reader normalize(CharSequence str) { if (charDelta < 0) { // normalised form is shorter int lastDiff = getLastCumulativeDiff(); - addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + addOffCorrectMap(normalizedCharPos, lastDiff - charDelta); } else if (charDelta > 0) { // inserted chars, add the offset in the output stream int lastDiff = getLastCumulativeDiff(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java index 31deac066cba2..01821f5582471 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/UnigramTokenizer.java @@ -367,8 +367,10 @@ List tokenize(CharSequence inputSequence, IntToIntFuncti new DelimitedToken.Encoded( Strings.format("<0x%02X>", bytes[i]), pieces[i], + // even though we are changing the number of characters in the output, we don't + // need to change the offsets. The offsets refer to the input characters offsetCorrection.apply(node.startsAtCharPos), - offsetCorrection.apply(startsAtBytes + i) + offsetCorrection.apply(endsAtChars) ) ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 561076c302eda..1604c47ac4754 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -36,7 +36,7 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - return new InternalHistogram.Bucket(timestamp, docCount, false, DocValueFormat.RAW, createAggs(subAggregations)); + return new InternalHistogram.Bucket(timestamp, docCount, DocValueFormat.RAW, createAggs(subAggregations)); } static InternalComposite.InternalBucket createCompositeBucket( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java index bbe509da67452..a8461de8630ae 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java @@ -94,6 +94,20 @@ public void testTokenize() throws IOException { } } + public void testTokenizeWithHiddenControlCharacters() throws IOException { + try ( + DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( + TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new DebertaV2Tokenization(false, false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = 
tokenizer.tokenize("\u009F\u008Fz", Tokenization.Truncate.NONE, -1, 0, null).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁", "z")); + + } + } + public void testSurrogatePair() throws IOException { try ( DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba25a774ff540..e33c1cc30f355 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -444,20 +444,14 @@ private static InternalAggregation unrollMultiBucket( long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalDateHistogram.Bucket( - key, - bucketCount, - ((InternalDateHistogram.Bucket) bucket).getKeyed(), - formatter, - subAggs - ); + return new InternalDateHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof InternalHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalHistogram.Bucket(key, bucketCount, ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); + return new InternalHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index df97c489cc6b7..bfff63442281d 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -358,6 +358,7 @@ public class Constants { "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/features", "cluster:monitor/nodes/hot_threads", + "cluster:monitor/nodes/index_mode_stats", "cluster:monitor/nodes/info", "cluster:monitor/nodes/stats", "cluster:monitor/nodes/usage", @@ -399,6 +400,7 @@ public class Constants { "cluster:monitor/xpack/info/frozen_indices", "cluster:monitor/xpack/info/graph", "cluster:monitor/xpack/info/ilm", + "cluster:monitor/xpack/info/logsdb", "cluster:monitor/xpack/info/logstash", "cluster:monitor/xpack/info/ml", "cluster:monitor/xpack/info/monitoring", @@ -463,6 +465,7 @@ public class Constants { "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", "cluster:monitor/xpack/usage/inference", + "cluster:monitor/xpack/usage/logsdb", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", @@ -488,6 +491,7 @@ public class Constants { "indices:admin/block/add[s]", "indices:admin/cache/clear", "indices:admin/data_stream/lazy_rollover", 
+ "indices:admin/data_stream/reindex", "indices:internal/admin/ccr/restore/file_chunk/get", "indices:internal/admin/ccr/restore/session/clear", "indices:internal/admin/ccr/restore/session/put", diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java index bc01b0693af0a..2851af1461012 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java @@ -107,7 +107,7 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { if (inFipsJvm()) { Throwable t = ExceptionsHelper.unwrap(e, CertificateException.class); assertThat(t, instanceOf(CertificateException.class)); - assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + assertThat(t.getMessage(), containsString("Unable to construct a valid chain")); } else { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java index 13e3e40887d89..429b632cdac18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java @@ -27,10 +27,11 @@ public class AuditUtil { public static String restRequestContent(RestRequest request) { if (request.hasContent()) { + var content = request.releasableContent(); try { - return XContentHelper.convertToJson(request.content(), false, false, request.getXContentType()); + return XContentHelper.convertToJson(content, false, false, request.getXContentType()); } catch (IOException ioe) { - return "Invalid Format: " + request.content().utf8ToString(); + return "Invalid Format: " + content.utf8ToString(); } } return ""; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index f0405e42f1f22..df21f5d4eeb0b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -75,7 +75,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie return innerPrepareRequest(request, client); } else { request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); // mark content consumed return channel -> channel.sendResponse(new RestResponse(channel, failedFeature)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 5adc1e351931d..3be40c280874d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -2614,7 +2614,7 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { @@ -2643,8 +2643,8 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { - checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.getHttpRequest().body().asFull().bytes().utf8ToString()); + if (includeRequestBody && request.hasContent()) { + checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { checkedFields.put(LoggingAuditTrail.URL_QUERY_FIELD_NAME, "foo=bar&evac=true"); @@ -2672,7 +2672,7 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content().utf8ToString())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index c5c5e14934408..e381663d4174e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -571,7 +571,11 @@ public void testClientChannelUsesSeparateSslConfigurationForRemoteCluster() thro final ConnectTransportException e = openConnectionExpectFailure(qcService, node, connectionProfile); assertThat( e.getRootCause().getMessage(), - anyOf(containsString("unable to find valid certification path"), containsString("Unable to find certificate chain")) + anyOf( + containsString("unable to find valid certification path"), + containsString("Unable to find certificate chain"), + containsString("Unable to construct a valid chain") + ) ); } diff --git a/x-pack/plugin/sql/qa/jdbc/security/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/build.gradle index c446755e91929..82510285cb996 100644 --- a/x-pack/plugin/sql/qa/jdbc/security/build.gradle +++ b/x-pack/plugin/sql/qa/jdbc/security/build.gradle @@ -1,4 +1,8 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask +import 
org.elasticsearch.gradle.testclusters.TestClusterValueSource +import org.elasticsearch.gradle.testclusters.TestClustersPlugin +import org.elasticsearch.gradle.testclusters.TestClustersRegistry +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,7 +15,10 @@ dependencies { Project mainProject = project + subprojects { + def clusterPath = getPath() + // Use tests from the root security qa project in subprojects configurations.create('testArtifacts').transitive(false) @@ -46,6 +53,17 @@ subprojects { dependsOn copyTestClasses classpath += configurations.testArtifacts testClassesDirs = project.files(testArtifactsDir) + + Provider serviceProvider = GradleUtils.getBuildService( + project.gradle.sharedServices, + TestClustersPlugin.REGISTRY_SERVICE_NAME + ) + project.getProviders().of(TestClusterValueSource.class) { + it.parameters.path.set(clusterPath) + it.parameters.clusterName.set("javaRestTest") + it.parameters.service = serviceProvider + } + nonInputProperties.systemProperty 'tests.audit.logfile', "${-> testClusters.javaRestTest.singleNode().getAuditLog()}" nonInputProperties.systemProperty 'tests.audit.yesterday.logfile', diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 411a4cda868f0..f9fed2b8f6a7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -360,11 +359,6 @@ static class ImplicitGroupActionListener extends BaseAggActionListener { private static final List EMPTY_BUCKET = singletonList(new Bucket() { - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new SqlIllegalArgumentException("No group-by/aggs defined"); - } - @Override public Object getKey() { throw new SqlIllegalArgumentException("No group-by/aggs defined"); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java index cb832cbd4b2d4..8f8f5917ae123 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java @@ -8,9 +8,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; import java.util.Map; class TestBucket implements Bucket { @@ -25,11 +23,6 @@ class TestBucket implements Bucket { this.aggs = aggs; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new 
UnsupportedOperationException(); - } - @Override public Map getKey() { return key; diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml index 83234901ae8f2..35cfbac5e3439 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/140_metadata.yml @@ -170,4 +170,43 @@ setup: catch: /cannot sort on _source/ esql.query: body: - query: 'FROM test metadata _source | sort _source' + query: 'FROM test metadata _source | SORT _source' + +--- +"sort returning _source is allowed": + - requires: + test_runner_features: [capabilities] + capabilities: + - method: POST + path: /_query + parameters: [] + capabilities: [sort_returning_source_ok] + reason: "Sorts returning _source should be ok, but weren't in older versions" + - do: + esql.query: + body: + query: 'FROM test METADATA _source | SORT case ASC | KEEP case, _source | LIMIT 5' + - length: { columns: 2 } + - length: { values: 3 } + - match: {columns.0.name: "case"} + - match: {columns.0.type: "keyword"} + - match: {columns.1.name: "_source"} + - match: {columns.1.type: "_source"} + - match: {values.0.0: "all_ignored"} + - match: {values.0.1: { + "integer" : "not-an-integer", + "keyword" : "long-keyword", + "case" : "all_ignored" + }} + - match: {values.1.0: "integer_ignored"} + - match: {values.1.1: { + "integer" : "not-an-integer", + "keyword" : "ok", + "case" : "integer_ignored" + }} + - match: {values.2.0: "ok"} + - match: {values.2.1: { + "integer" : 10, + "keyword" : "ok", + "case" : "ok" + }} diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 681ec38e9a57a..7359071996cc8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -918,14 +918,14 @@ public void testRangeAggExtractor() { Aggregation agg = createRangeAgg( "p_agg", List.of( - new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, false, DocValueFormat.RAW) + new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + 
new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, DocValueFormat.RAW) ) ); assertThat( diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java index 1691a464d8061..99fb626ad9474 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/bench/WatcherScheduleEngineBenchmark.java @@ -16,11 +16,13 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.MockNode; import org.elasticsearch.node.Node; +import org.elasticsearch.plugins.PluginsLoader; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; @@ -96,18 +98,18 @@ public static void main(String[] args) throws Exception { ); System.out.println("and heap_max=" + JvmInfo.jvmInfo().getMem().getHeapMax()); + Environment internalNodeEnv = InternalSettingsPreparer.prepareEnvironment( + Settings.builder().put(SETTINGS).put("node.data", false).build(), + emptyMap(), + null, + () -> { + throw new IllegalArgumentException("settings must have [node.name]"); + } + ); + // First clean everything and index the watcher (but not via put alert api!) try ( - Node node = new Node( - InternalSettingsPreparer.prepareEnvironment( - Settings.builder().put(SETTINGS).put("node.data", false).build(), - emptyMap(), - null, - () -> { - throw new IllegalArgumentException("settings must have [node.name]"); - } - ) - ).start() + Node node = new Node(internalNodeEnv, new PluginsLoader(internalNodeEnv.modulesFile(), internalNodeEnv.pluginsFile())).start() ) { final Client client = node.client(); ClusterHealthResponse response = client.admin().cluster().prepareHealth(TimeValue.THIRTY_SECONDS).setWaitForNodes("2").get(); diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 78cfc0f688e4a..ecd02ac9d209f 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.transform.UnzipTransform
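Earlier in this diff the IBM watsonx embeddings service starts threading ChunkingSettings from the model configuration into the EmbeddingRequestChunker. As a rough, self-contained illustration of the underlying idea only — splitting long inputs into bounded, slightly overlapping word windows before batching them for embedding — here is a JDK-only sketch; the word-based strategy, parameter names, and values are assumptions for illustration, not the inference plugin's ChunkingSettings code: import java.util.ArrayList; import java.util.Arrays; import java.util.List; public final class WordChunkerSketch { // Split the input into chunks of at most maxWordsPerChunk words, where each chunk // repeats the last overlapWords words of the previous chunk to preserve context. static List<String> chunk(String input, int maxWordsPerChunk, int overlapWords) { String[] words = input.trim().split("\\s+"); List<String> chunks = new ArrayList<>(); int step = Math.max(1, maxWordsPerChunk - overlapWords); for (int start = 0; start < words.length; start += step) { int end = Math.min(words.length, start + maxWordsPerChunk); chunks.add(String.join(" ", Arrays.copyOfRange(words, start, end))); if (end == words.length) { break; } } return chunks; } public static void main(String[] args) { // With a 5-word window and a 2-word overlap, a 13-word sentence becomes 4 chunks. chunk("the quick brown fox jumps over the lazy dog near the river bank", 5, 2) .forEach(System.out::println); } }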